Merge branch '5.1.x'

Rossen Stoyanchev 2019-04-16 21:08:54 -04:00
commit de3238dbea
15 changed files with 112 additions and 141 deletions

View File: AbstractDecoderTestCase.java

@ -1,5 +1,5 @@
/*
* Copyright 2002-2018 the original author or authors.
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -16,6 +16,7 @@
package org.springframework.core.codec;
import java.time.Duration;
import java.util.Map;
import java.util.function.Consumer;
@ -32,6 +33,8 @@ import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.MimeType;
import static org.junit.Assert.*;
/**
* Abstract base class for {@link Decoder} unit tests. Subclasses need to implement
* {@link #canDecode()}, {@link #decode()} and {@link #decodeToMono()}, possibly using the wide
@ -99,6 +102,7 @@ public abstract class AbstractDecoderTestCase<D extends Decoder<?>>
*/
protected <T> void testDecodeAll(Publisher<DataBuffer> input, Class<? extends T> outputClass,
Consumer<StepVerifier.FirstStep<T>> stepConsumer) {
testDecodeAll(input, ResolvableType.forClass(outputClass), stepConsumer, null, null);
}
@ -122,6 +126,7 @@ public abstract class AbstractDecoderTestCase<D extends Decoder<?>>
protected <T> void testDecodeAll(Publisher<DataBuffer> input, ResolvableType outputType,
Consumer<StepVerifier.FirstStep<T>> stepConsumer,
@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
testDecode(input, outputType, stepConsumer, mimeType, hints);
testDecodeError(input, outputType, mimeType, hints);
testDecodeCancel(input, outputType, mimeType, hints);
@ -151,6 +156,7 @@ public abstract class AbstractDecoderTestCase<D extends Decoder<?>>
*/
protected <T> void testDecode(Publisher<DataBuffer> input, Class<? extends T> outputClass,
Consumer<StepVerifier.FirstStep<T>> stepConsumer) {
testDecode(input, ResolvableType.forClass(outputClass), stepConsumer, null, null);
}
@ -202,16 +208,14 @@ public abstract class AbstractDecoderTestCase<D extends Decoder<?>>
protected void testDecodeError(Publisher<DataBuffer> input, ResolvableType outputType,
@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
input = Flux.concat(
Flux.from(input).take(1),
Flux.error(new InputException()));
Flux<?> result = this.decoder.decode(input, outputType, mimeType, hints);
StepVerifier.create(result)
.expectNextCount(1)
.expectError(InputException.class)
.verify();
input = Mono.from(input).concatWith(Flux.error(new InputException()));
try {
this.decoder.decode(input, outputType, mimeType, hints).blockLast(Duration.ofSeconds(5));
fail();
}
catch (InputException ex) {
// expected
}
}
/**
@ -229,11 +233,7 @@ public abstract class AbstractDecoderTestCase<D extends Decoder<?>>
@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
Flux<?> result = this.decoder.decode(input, outputType, mimeType, hints);
StepVerifier.create(result)
.expectNextCount(1)
.thenCancel()
.verify();
StepVerifier.create(result).expectNextCount(1).thenCancel().verify();
}
/**
@ -249,9 +249,7 @@ public abstract class AbstractDecoderTestCase<D extends Decoder<?>>
Flux<DataBuffer> input = Flux.empty();
Flux<?> result = this.decoder.decode(input, outputType, mimeType, hints);
StepVerifier.create(result)
.verifyComplete();
StepVerifier.create(result).verifyComplete();
}
// Mono
@ -297,6 +295,7 @@ public abstract class AbstractDecoderTestCase<D extends Decoder<?>>
protected <T> void testDecodeToMonoAll(Publisher<DataBuffer> input, ResolvableType outputType,
Consumer<StepVerifier.FirstStep<T>> stepConsumer,
@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
testDecodeToMono(input, outputType, stepConsumer, mimeType, hints);
testDecodeToMonoError(input, outputType, mimeType, hints);
testDecodeToMonoCancel(input, outputType, mimeType, hints);
@ -326,6 +325,7 @@ public abstract class AbstractDecoderTestCase<D extends Decoder<?>>
*/
protected <T> void testDecodeToMono(Publisher<DataBuffer> input,
Class<? extends T> outputClass, Consumer<StepVerifier.FirstStep<T>> stepConsumer) {
testDecodeToMono(input, ResolvableType.forClass(outputClass), stepConsumer, null, null);
}
@ -377,15 +377,9 @@ public abstract class AbstractDecoderTestCase<D extends Decoder<?>>
protected void testDecodeToMonoError(Publisher<DataBuffer> input, ResolvableType outputType,
@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
input = Flux.concat(
Flux.from(input).take(1),
Flux.error(new InputException()));
input = Mono.from(input).concatWith(Flux.error(new InputException()));
Mono<?> result = this.decoder.decodeToMono(input, outputType, mimeType, hints);
StepVerifier.create(result)
.expectError(InputException.class)
.verify();
StepVerifier.create(result).expectError(InputException.class).verify();
}
/**
@ -401,10 +395,7 @@ public abstract class AbstractDecoderTestCase<D extends Decoder<?>>
@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
Mono<?> result = this.decoder.decodeToMono(input, outputType, mimeType, hints);
StepVerifier.create(result)
.thenCancel()
.verify();
StepVerifier.create(result).thenCancel().verify();
}
/**
@ -418,11 +409,8 @@ public abstract class AbstractDecoderTestCase<D extends Decoder<?>>
protected void testDecodeToMonoEmpty(ResolvableType outputType, @Nullable MimeType mimeType,
@Nullable Map<String, Object> hints) {
Flux<DataBuffer> input = Flux.empty();
Mono<?> result = this.decoder.decodeToMono(input, outputType, mimeType, hints);
StepVerifier.create(result)
.verifyComplete();
Mono<?> result = this.decoder.decodeToMono(Flux.empty(), outputType, mimeType, hints);
StepVerifier.create(result).verifyComplete();
}
/**
@ -431,10 +419,10 @@ public abstract class AbstractDecoderTestCase<D extends Decoder<?>>
* @return the deferred buffer
*/
protected Mono<DataBuffer> dataBuffer(byte[] bytes) {
return Mono.defer(() -> {
return Mono.fromCallable(() -> {
DataBuffer dataBuffer = this.bufferFactory.allocateBuffer(bytes.length);
dataBuffer.write(bytes);
return Mono.just(dataBuffer);
return dataBuffer;
});
}
@ -442,9 +430,6 @@ public abstract class AbstractDecoderTestCase<D extends Decoder<?>>
* Exception used in {@link #testDecodeError} and {@link #testDecodeToMonoError}
*/
@SuppressWarnings("serial")
public static class InputException extends RuntimeException {
}
public static class InputException extends RuntimeException {}
}
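
As the javadoc above notes, concrete tests only implement canDecode(), decode() and decodeToMono() and delegate the heavy lifting to the helper methods. A minimal sketch of such a subclass, built around a hypothetical FooDecoder that turns each buffer into a String (the constructor taking the decoder under test is assumed from the existing subclasses):

import java.nio.charset.StandardCharsets;

import org.junit.Test;
import reactor.core.publisher.Flux;

import org.springframework.core.ResolvableType;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.util.MimeTypeUtils;

import static org.junit.Assert.*;

public class FooDecoderTests extends AbstractDecoderTestCase<FooDecoder> {

	public FooDecoderTests() {
		super(new FooDecoder());  // FooDecoder is hypothetical
	}

	@Override
	@Test
	public void canDecode() {
		assertTrue(this.decoder.canDecode(ResolvableType.forClass(String.class), MimeTypeUtils.TEXT_PLAIN));
	}

	@Override
	@Test
	public void decode() {
		Flux<DataBuffer> input = Flux.concat(
				dataBuffer("foo".getBytes(StandardCharsets.UTF_8)),
				dataBuffer("bar".getBytes(StandardCharsets.UTF_8)));
		// Runs the happy path plus the error, cancel and empty scenarios defined above
		testDecodeAll(input, String.class, step -> step.expectNext("foo", "bar").verifyComplete());
	}

	@Override
	@Test
	public void decodeToMono() {
		Flux<DataBuffer> input = Flux.concat(
				dataBuffer("foo".getBytes(StandardCharsets.UTF_8)),
				dataBuffer("bar".getBytes(StandardCharsets.UTF_8)));
		testDecodeToMono(input, String.class, step -> step.expectNext("foobar").verifyComplete());
	}
}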

View File: ResourceDecoderTests.java

@ -19,25 +19,20 @@ package org.springframework.core.codec;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.Map;
import org.junit.Test;
import org.reactivestreams.Publisher;
import reactor.core.publisher.Flux;
import reactor.test.StepVerifier;
import org.springframework.core.ResolvableType;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.core.io.InputStreamResource;
import org.springframework.core.io.Resource;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.lang.Nullable;
import org.springframework.util.MimeType;
import org.springframework.util.MimeTypeUtils;
import org.springframework.util.StreamUtils;
import static org.junit.Assert.*;
import static org.springframework.core.ResolvableType.forClass;
import static org.springframework.core.ResolvableType.*;
/**
* @author Arjen Poutsma
@ -67,9 +62,7 @@ public class ResourceDecoderTests extends AbstractDecoderTestCase<ResourceDecode
@Override
@Test
public void decode() {
Flux<DataBuffer> input = Flux.concat(
dataBuffer(this.fooBytes),
dataBuffer(this.barBytes));
Flux<DataBuffer> input = Flux.concat(dataBuffer(this.fooBytes), dataBuffer(this.barBytes));
testDecodeAll(input, Resource.class, step -> step
.consumeNextWith(resource -> {
@ -85,21 +78,6 @@ public class ResourceDecoderTests extends AbstractDecoderTestCase<ResourceDecode
.verify());
}
@Override
protected void testDecodeError(Publisher<DataBuffer> input, ResolvableType outputType,
@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
input = Flux.concat(
Flux.from(input).take(1),
Flux.error(new InputException()));
Flux<Resource> result = this.decoder.decode(input, outputType, mimeType, hints);
StepVerifier.create(result)
.expectError(InputException.class)
.verify();
}
@Override
public void decodeToMono() {
Flux<DataBuffer> input = Flux.concat(

View File: ResourceRegionEncoderTests.java

@ -19,7 +19,6 @@ package org.springframework.core.codec;
import java.util.Collections;
import java.util.function.Consumer;
import io.netty.buffer.PooledByteBufAllocator;
import org.junit.After;
import org.junit.Test;
import org.reactivestreams.Subscription;
@ -34,7 +33,6 @@ import org.springframework.core.io.Resource;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DataBufferUtils;
import org.springframework.core.io.buffer.LeakAwareDataBufferFactory;
import org.springframework.core.io.buffer.NettyDataBufferFactory;
import org.springframework.core.io.buffer.support.DataBufferTestUtils;
import org.springframework.core.io.support.ResourceRegion;
import org.springframework.util.MimeType;
@ -51,8 +49,7 @@ public class ResourceRegionEncoderTests {
private ResourceRegionEncoder encoder = new ResourceRegionEncoder();
private LeakAwareDataBufferFactory bufferFactory =
new LeakAwareDataBufferFactory(new NettyDataBufferFactory(PooledByteBufAllocator.DEFAULT));
private LeakAwareDataBufferFactory bufferFactory = new LeakAwareDataBufferFactory();
@After

View File: StringDecoderTests.java

@ -1,5 +1,5 @@
/*
* Copyright 2002-2018 the original author or authors.
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -19,18 +19,16 @@ package org.springframework.core.codec;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import org.reactivestreams.Publisher;
import reactor.core.publisher.Flux;
import reactor.test.StepVerifier;
import org.springframework.core.ResolvableType;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.lang.Nullable;
import org.springframework.util.MimeType;
import org.springframework.util.MimeTypeUtils;
@ -61,10 +59,8 @@ public class StringDecoderTests extends AbstractDecoderTestCase<StringDecoder> {
assertTrue(this.decoder.canDecode(TYPE, MimeTypeUtils.TEXT_HTML));
assertTrue(this.decoder.canDecode(TYPE, MimeTypeUtils.APPLICATION_JSON));
assertTrue(this.decoder.canDecode(TYPE, MimeTypeUtils.parseMimeType("text/plain;charset=utf-8")));
assertFalse(this.decoder.canDecode(
ResolvableType.forClass(Integer.class), MimeTypeUtils.TEXT_PLAIN));
assertFalse(this.decoder.canDecode(
ResolvableType.forClass(Object.class), MimeTypeUtils.APPLICATION_JSON));
assertFalse(this.decoder.canDecode(ResolvableType.forClass(Integer.class), MimeTypeUtils.TEXT_PLAIN));
assertFalse(this.decoder.canDecode(ResolvableType.forClass(Object.class), MimeTypeUtils.APPLICATION_JSON));
}
@Override
@ -76,24 +72,7 @@ public class StringDecoderTests extends AbstractDecoderTestCase<StringDecoder> {
String s = String.format("%s\n%s\n%s", u, e, o);
Flux<DataBuffer> input = toDataBuffers(s, 1, UTF_8);
testDecodeAll(input, ResolvableType.forClass(String.class), step -> step
.expectNext(u, e, o)
.verifyComplete(), null, null);
}
@Override
protected void testDecodeError(Publisher<DataBuffer> input, ResolvableType outputType,
@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
input = Flux.concat(
Flux.from(input).take(1),
Flux.error(new InputException()));
Flux<String> result = this.decoder.decode(input, outputType, mimeType, hints);
StepVerifier.create(result)
.expectError(InputException.class)
.verify();
testDecodeAll(input, TYPE, step -> step.expectNext(u, e, o).verifyComplete(), null, null);
}
@Test
@ -105,21 +84,21 @@ public class StringDecoderTests extends AbstractDecoderTestCase<StringDecoder> {
Flux<DataBuffer> source = toDataBuffers(s, 2, UTF_16BE);
MimeType mimeType = MimeTypeUtils.parseMimeType("text/plain;charset=utf-16be");
testDecode(source, TYPE, step -> step
.expectNext(u, e, o)
.verifyComplete(), mimeType, null);
testDecode(source, TYPE, step -> step.expectNext(u, e, o).verifyComplete(), mimeType, null);
}
private Flux<DataBuffer> toDataBuffers(String s, int length, Charset charset) {
byte[] bytes = s.getBytes(charset);
List<DataBuffer> dataBuffers = new ArrayList<>();
List<byte[]> chunks = new ArrayList<>();
for (int i = 0; i < bytes.length; i += length) {
DataBuffer dataBuffer = this.bufferFactory.allocateBuffer(length);
dataBuffer.write(bytes, i, length);
dataBuffers.add(dataBuffer);
chunks.add(Arrays.copyOfRange(bytes, i, i + length));
}
return Flux.fromIterable(dataBuffers);
return Flux.fromIterable(chunks)
.map(chunk -> {
DataBuffer dataBuffer = this.bufferFactory.allocateBuffer(length);
dataBuffer.write(chunk, 0, chunk.length);
return dataBuffer;
});
}
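
The rewritten toDataBuffers allocates buffers lazily, per subscription, rather than building the list up front. That matters because testDecodeAll subscribes to the same input several times (decode, error, cancel, empty), and buffers created eagerly would already be consumed, or released by the leak-aware factory, after the first pass. A rough sketch of the difference, using DataBufferFactory.wrap for brevity:

// Eager: the same DataBuffer instances are replayed to every subscriber,
// so a second subscription sees buffers that were already read or released.
List<DataBuffer> buffers = Arrays.asList(
		bufferFactory.wrap("fo".getBytes(StandardCharsets.UTF_8)),
		bufferFactory.wrap("o!".getBytes(StandardCharsets.UTF_8)));
Flux<DataBuffer> eager = Flux.fromIterable(buffers);

// Lazy (as above): a fresh buffer is created for each subscriber and each chunk.
Flux<DataBuffer> lazy = Flux.fromIterable(Arrays.asList("fo", "o!"))
		.map(chunk -> bufferFactory.wrap(chunk.getBytes(StandardCharsets.UTF_8)));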
@Test

View File: LeakAwareDataBuffer.java

@ -63,6 +63,11 @@ class LeakAwareDataBuffer implements PooledDataBuffer {
return this.leakError;
}
public DataBuffer getDelegate() {
return this.delegate;
}
@Override
public boolean isAllocated() {
return this.delegate instanceof PooledDataBuffer &&

View File: LeakAwareDataBufferFactory.java

@ -23,6 +23,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import io.netty.buffer.PooledByteBufAllocator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jetbrains.annotations.NotNull;
@ -55,7 +56,7 @@ public class LeakAwareDataBufferFactory implements DataBufferFactory {
* {@link DefaultDataBufferFactory}.
*/
public LeakAwareDataBufferFactory() {
this(new DefaultDataBufferFactory());
this(new NettyDataBufferFactory(PooledByteBufAllocator.DEFAULT));
}
/**
@ -67,6 +68,7 @@ public class LeakAwareDataBufferFactory implements DataBufferFactory {
this.delegate = delegate;
}
/**
* Checks whether all of the data buffers allocated by this factory have also been released.
* If not, then an {@link AssertionError} is thrown. Typically used from a JUnit {@link After}
@ -126,6 +128,10 @@ public class LeakAwareDataBufferFactory implements DataBufferFactory {
@Override
public DataBuffer join(List<? extends DataBuffer> dataBuffers) {
// Remove LeakAwareDataBuffer wrapper so delegate can find native buffers
dataBuffers = dataBuffers.stream()
.map(o -> o instanceof LeakAwareDataBuffer ? ((LeakAwareDataBuffer) o).getDelegate() : o)
.collect(Collectors.toList());
return new LeakAwareDataBuffer(this.delegate.join(dataBuffers), this);
}
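
As the javadoc above suggests, the factory is typically created per test and verified from an @After method; the commit also switches its default delegate to a pooled Netty allocator, so an unreleased buffer is a genuine leak rather than plain garbage. A small usage sketch (the checkForLeaks() method name is assumed from the javadoc's description):

import java.nio.charset.StandardCharsets;

import org.junit.After;
import org.junit.Test;

import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DataBufferUtils;
import org.springframework.core.io.buffer.LeakAwareDataBufferFactory;

import static org.junit.Assert.*;

public class SomeBufferTests {

	private final LeakAwareDataBufferFactory bufferFactory = new LeakAwareDataBufferFactory();

	@After
	public void checkForLeaks() {
		// Fails the test with an AssertionError if any allocated buffer was not released
		this.bufferFactory.checkForLeaks();
	}

	@Test
	public void roundTrip() {
		DataBuffer buffer = this.bufferFactory.allocateBuffer(4);
		buffer.write("test".getBytes(StandardCharsets.UTF_8));
		assertEquals(4, buffer.readableByteCount());
		DataBufferUtils.release(buffer);  // without this release, checkForLeaks() would fail
	}
}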

View File: MockServerHttpResponse.java

@ -1,5 +1,5 @@
/*
* Copyright 2002-2018 the original author or authors.
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -25,8 +25,10 @@ import java.util.function.Function;
import org.reactivestreams.Publisher;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.publisher.MonoProcessor;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DataBufferFactory;
import org.springframework.core.io.buffer.DataBufferUtils;
import org.springframework.core.io.buffer.DefaultDataBufferFactory;
import org.springframework.http.HttpHeaders;
@ -54,10 +56,17 @@ public class MockServerHttpResponse extends AbstractServerHttpResponse {
public MockServerHttpResponse() {
super(new DefaultDataBufferFactory());
this(new DefaultDataBufferFactory());
}
public MockServerHttpResponse(DataBufferFactory dataBufferFactory) {
super(dataBufferFactory);
this.writeHandler = body -> {
this.body = body.cache();
return this.body.then();
// Avoid .then() which causes data buffers to be released
MonoProcessor<Void> completion = MonoProcessor.create();
this.body = body.doOnComplete(completion::onComplete).doOnError(completion::onError).cache();
this.body.subscribe();
return completion;
};
}
@ -125,8 +134,10 @@ public class MockServerHttpResponse extends AbstractServerHttpResponse {
* charset or "UTF-8" by default.
*/
public Mono<String> getBodyAsString() {
Charset charset = Optional.ofNullable(getHeaders().getContentType()).map(MimeType::getCharset)
.orElse(StandardCharsets.UTF_8);
return getBody()
.reduce(bufferFactory().allocateBuffer(), (previous, current) -> {
previous.write(current);
@ -137,8 +148,10 @@ public class MockServerHttpResponse extends AbstractServerHttpResponse {
}
private static String bufferToString(DataBuffer buffer, Charset charset) {
Assert.notNull(charset, "'charset' must not be null");
byte[] bytes = new byte[buffer.readableByteCount()];
buffer.read(bytes);
DataBufferUtils.release(buffer);
return new String(bytes, charset);
}
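
With the reworked write handler the cached body is no longer released by a trailing .then(), so a test can still drain it after the write completes. A short sketch of how that typically looks, assuming the writeWith plus getBodyAsString flow shown above:

MockServerHttpResponse response = new MockServerHttpResponse();
DataBuffer buffer = response.bufferFactory().allocateBuffer();
buffer.write("hello".getBytes(StandardCharsets.UTF_8));

// The handler caches the body and signals completion via the MonoProcessor
response.writeWith(Mono.just(buffer)).block(Duration.ofSeconds(5));

StepVerifier.create(response.getBodyAsString())
		.expectNext("hello")
		.verifyComplete();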

View File: ReactorClientHttpRequest.java

@ -1,5 +1,5 @@
/*
* Copyright 2002-2018 the original author or authors.
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -81,8 +81,16 @@ class ReactorClientHttpRequest extends AbstractClientHttpRequest implements Zero
@Override
public Mono<Void> writeWith(Publisher<? extends DataBuffer> body) {
return doCommit(() -> {
Flux<ByteBuf> byteBufFlux = Flux.from(body).map(NettyDataBufferFactory::toByteBuf);
return this.outbound.send(byteBufFlux).then();
// Send as Mono if possible as an optimization hint to Reactor Netty
if (body instanceof Mono) {
Mono<ByteBuf> byteBufMono = Mono.from(body).map(NettyDataBufferFactory::toByteBuf);
return this.outbound.send(byteBufMono).then();
}
else {
Flux<ByteBuf> byteBufFlux = Flux.from(body).map(NettyDataBufferFactory::toByteBuf);
return this.outbound.send(byteBufFlux).then();
}
});
}

View File: EncoderHttpMessageWriter.java

@ -125,16 +125,14 @@ public class EncoderHttpMessageWriter<T> implements HttpMessageWriter<T> {
}))
.flatMap(buffer -> {
headers.setContentLength(buffer.readableByteCount());
return message.writeWith(
Mono.fromCallable(() -> buffer)
.doOnDiscard(PooledDataBuffer.class, PooledDataBuffer::release));
return message.writeWith(Mono.just(buffer)
.doOnDiscard(PooledDataBuffer.class, PooledDataBuffer::release));
});
}
if (isStreamingMediaType(contentType)) {
return message.writeAndFlushWith(body.map(buffer ->
Mono.fromCallable(() -> buffer)
.doOnDiscard(PooledDataBuffer.class, PooledDataBuffer::release)));
Mono.just(buffer).doOnDiscard(PooledDataBuffer.class, PooledDataBuffer::release)));
}
return message.writeWith(body);
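
The switch from Mono.fromCallable to Mono.just keeps the already-encoded buffer as-is, while doOnDiscard remains the safety net that releases it if the write never consumes it (for instance when the exchange is cancelled). A generic Reactor sketch of that hook, using a String stand-in instead of a real pooled buffer:

import java.util.concurrent.atomic.AtomicBoolean;

import reactor.core.publisher.Mono;

public class DiscardSketch {

	public static void main(String[] args) {
		AtomicBoolean released = new AtomicBoolean();

		Mono.just("encoded-buffer")                                       // stand-in for the pooled DataBuffer
				.doOnDiscard(String.class, value -> released.set(true))  // release hook
				.filter(value -> false)                                   // downstream drops the value
				.block();

		System.out.println("released: " + released.get());               // prints "released: true"
	}
}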

View File: AbstractServerHttpResponse.java

@ -28,6 +28,8 @@ import reactor.core.publisher.Mono;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DataBufferFactory;
import org.springframework.core.io.buffer.DataBufferUtils;
import org.springframework.core.io.buffer.PooledDataBuffer;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpLogging;
import org.springframework.http.HttpStatus;
@ -172,9 +174,16 @@ public abstract class AbstractServerHttpResponse implements ServerHttpResponse {
}
@Override
@SuppressWarnings("unchecked")
public final Mono<Void> writeWith(Publisher<? extends DataBuffer> body) {
return new ChannelSendOperator<>(body,
writePublisher -> doCommit(() -> writeWithInternal(writePublisher)))
// Write as Mono if possible as an optimization hint to Reactor Netty
// ChannelSendOperator not necessary for Mono
if (body instanceof Mono) {
return ((Mono<? extends DataBuffer>) body).flatMap(buffer ->
doCommit(() -> writeWithInternal(Mono.just(buffer)))
.doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release));
}
return new ChannelSendOperator<>(body, inner -> doCommit(() -> writeWithInternal(inner)))
.doOnError(t -> removeContentLength());
}
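
From a handler's perspective nothing changes, but passing the body as a Mono now takes the leaner path that skips ChannelSendOperator. A hedged sketch of writing a single buffer, assuming a ServerHttpResponse named response:

DataBuffer buffer = response.bufferFactory()
		.wrap("{\"status\":\"ok\"}".getBytes(StandardCharsets.UTF_8));

// A Mono body commits and writes directly; a Flux body still goes through ChannelSendOperator
Mono<Void> result = response.writeWith(Mono.just(buffer));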

View File: FormHttpMessageWriterTests.java

@ -1,5 +1,5 @@
/*
* Copyright 2002-2018 the original author or authors.
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@ -85,8 +85,7 @@ public class FormHttpMessageWriterTests extends AbstractLeakCheckingTestCase {
String expected = "name+1=value+1&name+2=value+2%2B1&name+2=value+2%2B2&name+3";
StepVerifier.create(response.getBody())
.consumeNextWith(stringConsumer(
expected))
.consumeNextWith(stringConsumer(expected))
.expectComplete()
.verify();
HttpHeaders headers = response.getHeaders();
@ -96,8 +95,7 @@ public class FormHttpMessageWriterTests extends AbstractLeakCheckingTestCase {
private Consumer<DataBuffer> stringConsumer(String expected) {
return dataBuffer -> {
String value =
DataBufferTestUtils.dumpString(dataBuffer, StandardCharsets.UTF_8);
String value = DataBufferTestUtils.dumpString(dataBuffer, StandardCharsets.UTF_8);
DataBufferUtils.release(dataBuffer);
assertEquals(expected, value);
};

View File: ChannelSendOperatorTests.java

@ -24,7 +24,6 @@ import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import io.netty.buffer.ByteBufAllocator;
import org.junit.Before;
import org.junit.Test;
import org.reactivestreams.Publisher;
@ -38,7 +37,6 @@ import reactor.test.StepVerifier;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.LeakAwareDataBufferFactory;
import org.springframework.core.io.buffer.NettyDataBufferFactory;
import static org.junit.Assert.*;
@ -135,8 +133,7 @@ public class ChannelSendOperatorTests {
@Test // gh-22720
public void cancelWhileItemCached() {
NettyDataBufferFactory delegate = new NettyDataBufferFactory(ByteBufAllocator.DEFAULT);
LeakAwareDataBufferFactory bufferFactory = new LeakAwareDataBufferFactory(delegate);
LeakAwareDataBufferFactory bufferFactory = new LeakAwareDataBufferFactory();
ChannelSendOperator<DataBuffer> operator = new ChannelSendOperator<>(
Mono.fromCallable(() -> {
@ -164,8 +161,7 @@ public class ChannelSendOperatorTests {
// 2. writeFunction applied and writeCompletionBarrier subscribed to it
// 3. Write Publisher fails right after that and before request(n) from server
NettyDataBufferFactory delegate = new NettyDataBufferFactory(ByteBufAllocator.DEFAULT);
LeakAwareDataBufferFactory bufferFactory = new LeakAwareDataBufferFactory(delegate);
LeakAwareDataBufferFactory bufferFactory = new LeakAwareDataBufferFactory();
ZeroDemandSubscriber writeSubscriber = new ZeroDemandSubscriber();
ChannelSendOperator<DataBuffer> operator = new ChannelSendOperator<>(
@ -200,8 +196,7 @@ public class ChannelSendOperatorTests {
// 2. writeFunction applied and writeCompletionBarrier subscribed to it
// 3. writeFunction fails, e.g. to flush status and headers, before request(n) from server
NettyDataBufferFactory delegate = new NettyDataBufferFactory(ByteBufAllocator.DEFAULT);
LeakAwareDataBufferFactory bufferFactory = new LeakAwareDataBufferFactory(delegate);
LeakAwareDataBufferFactory bufferFactory = new LeakAwareDataBufferFactory();
ChannelSendOperator<DataBuffer> operator = new ChannelSendOperator<>(
Flux.create(sink -> {

View File: MockServerHttpResponse.java

@ -25,6 +25,7 @@ import java.util.function.Function;
import org.reactivestreams.Publisher;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.publisher.MonoProcessor;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DataBufferFactory;
@ -61,8 +62,11 @@ public class MockServerHttpResponse extends AbstractServerHttpResponse {
public MockServerHttpResponse(DataBufferFactory dataBufferFactory) {
super(dataBufferFactory);
this.writeHandler = body -> {
this.body = body.cache();
return this.body.then();
// Avoid .then() which causes data buffers to be released
MonoProcessor<Void> completion = MonoProcessor.create();
this.body = body.doOnComplete(completion::onComplete).doOnError(completion::onError).cache();
this.body.subscribe();
return completion;
};
}

View File: ModelAttributeMethodArgumentResolver.java

@ -135,8 +135,7 @@ public class ModelAttributeMethodArgumentResolver extends HandlerMethodArgumentR
BindingResult errors = binder.getBindingResult();
if (adapter != null) {
return adapter.fromPublisher(errors.hasErrors() ?
Mono.error(new WebExchangeBindException(parameter, errors)) :
valueMono);
Mono.error(new WebExchangeBindException(parameter, errors)) : valueMono);
}
else {
if (errors.hasErrors() && !hasErrorsArgument(parameter)) {

View File: ZeroDemandResponse.java

@ -17,7 +17,6 @@ package org.springframework.web.reactive.result.view;
import java.util.function.Supplier;
import io.netty.buffer.PooledByteBufAllocator;
import org.reactivestreams.Publisher;
import org.reactivestreams.Subscription;
import reactor.core.publisher.BaseSubscriber;
@ -26,7 +25,6 @@ import reactor.core.publisher.Mono;
import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DataBufferFactory;
import org.springframework.core.io.buffer.LeakAwareDataBufferFactory;
import org.springframework.core.io.buffer.NettyDataBufferFactory;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseCookie;
@ -47,8 +45,7 @@ public class ZeroDemandResponse implements ServerHttpResponse {
public ZeroDemandResponse() {
NettyDataBufferFactory delegate = new NettyDataBufferFactory(PooledByteBufAllocator.DEFAULT);
this.bufferFactory = new LeakAwareDataBufferFactory(delegate);
this.bufferFactory = new LeakAwareDataBufferFactory();
}