Renamed DataBufferUtils/DataBufferFactory.compose to join

Issue: SPR-16365
Arjen Poutsma 2018-01-22 10:50:13 +01:00
parent 3f3141cdda
commit 0befc60c8f
16 changed files with 42 additions and 37 deletions

View File

@@ -63,7 +63,7 @@ public abstract class AbstractDataBufferDecoder<T> extends AbstractDecoder<T> {
public Mono<T> decodeToMono(Publisher<DataBuffer> inputStream, ResolvableType elementType,
@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
- return DataBufferUtils.compose(inputStream)
+ return DataBufferUtils.join(inputStream)
.map(buffer -> decodeDataBuffer(buffer, elementType, mimeType, hints));
}

View File

@@ -63,13 +63,15 @@ public interface DataBufferFactory {
DataBuffer wrap(byte[] bytes);
/**
- * Create a composite data buffer from the list of provided data buffers. Depending on the
- * implementation, the returned buffer may be a single buffer containing all data of the
- * provided buffers, or it may be a true composite that contains references to the buffers.
+ * Return a new {@code DataBuffer} composed of the {@code dataBuffers} elements joined together.
+ * Depending on the implementation, the returned buffer may be a single buffer containing all
+ * data of the provided buffers, or it may be a true composite that contains references to the
+ * buffers.
* <p>Note that the given data buffers do <strong>not</strong> have to be released, as they are
* released as part of the returned composite.
* @param dataBuffers the data buffers to be composed
- * @return a buffer that composes {@code dataBuffers} into one
+ * @return a buffer that is composed from the {@code dataBuffers} argument
* @since 5.0.3
*/
- DataBuffer compose(List<DataBuffer> dataBuffers);
+ DataBuffer join(List<? extends DataBuffer> dataBuffers);
}

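For context, a minimal usage sketch of the renamed factory method (not part of this commit; the class name JoinExample is illustrative, while wrap(byte[]), join and readableByteCount come from the diff above). With DefaultDataBufferFactory, joining copies all data into one newly allocated buffer, and as the DefaultDataBufferFactory change further down shows, the source buffers are released on the caller's behalf:

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    import org.springframework.core.io.buffer.DataBuffer;
    import org.springframework.core.io.buffer.DataBufferFactory;
    import org.springframework.core.io.buffer.DefaultDataBufferFactory;

    public class JoinExample {

        public static void main(String[] args) {
            DataBufferFactory factory = new DefaultDataBufferFactory();

            // join(List<? extends DataBuffer>) replaces compose(List<DataBuffer>)
            DataBuffer joined = factory.join(Arrays.asList(
                    factory.wrap("foo".getBytes(StandardCharsets.UTF_8)),
                    factory.wrap("bar".getBytes(StandardCharsets.UTF_8))));

            // the inputs do not have to be released separately (see Javadoc above)
            byte[] bytes = new byte[joined.readableByteCount()];
            joined.read(bytes);
            System.out.println(new String(bytes, StandardCharsets.UTF_8));  // prints "foobar"
        }
    }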
View File

@@ -507,23 +507,23 @@ public abstract class DataBufferUtils {
}
/**
- * Composes the buffers in the given {@link Publisher} into a single data buffer. Depending on
- * the {@code DataBuffer} implementation, the returned buffer may be a single buffer containing
- * all data of the provided buffers, or it may be a true composite that contains references to
- * the buffers.
- * @param publisher the data buffers that are to be composed
- * @return the composed data buffer
+ * Return a new {@code DataBuffer} composed of the {@code dataBuffers} elements joined together.
+ * Depending on the {@link DataBuffer} implementation, the returned buffer may be a single
+ * buffer containing all data of the provided buffers, or it may be a true composite that
+ * contains references to the buffers.
+ * @param dataBuffers the data buffers that are to be composed
+ * @return a buffer that is composed from the {@code dataBuffers} argument
* @since 5.0.3
*/
- public static Mono<DataBuffer> compose(Publisher<DataBuffer> publisher) {
- Assert.notNull(publisher, "'publisher' must not be null");
+ public static Mono<DataBuffer> join(Publisher<? extends DataBuffer> dataBuffers) {
+ Assert.notNull(dataBuffers, "'dataBuffers' must not be null");
- Flux<DataBuffer> source = Flux.from(publisher);
- return source.collectList()
- .filter(dataBuffers -> !dataBuffers.isEmpty())
- .map(dataBuffers -> {
- DataBufferFactory bufferFactory = dataBuffers.get(0).factory();
- return bufferFactory.compose(dataBuffers);
+ return Flux.from(dataBuffers)
+ .collectList()
+ .filter(list -> !list.isEmpty())
+ .map(list -> {
+ DataBufferFactory bufferFactory = list.get(0).factory();
+ return bufferFactory.join(list);
});
}

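A sketch of the reactive variant, mirroring the updated DataBufferUtilsTests.join test further down (the class name JoinPublisherExample is illustrative; the API calls all appear elsewhere in this diff). An empty source completes empty because of the filter on the collected list:

    import java.nio.charset.StandardCharsets;
    import java.time.Duration;

    import reactor.core.publisher.Flux;
    import reactor.core.publisher.Mono;

    import org.springframework.core.io.buffer.DataBuffer;
    import org.springframework.core.io.buffer.DataBufferFactory;
    import org.springframework.core.io.buffer.DataBufferUtils;
    import org.springframework.core.io.buffer.DefaultDataBufferFactory;

    public class JoinPublisherExample {

        public static void main(String[] args) {
            DataBufferFactory factory = new DefaultDataBufferFactory();
            Flux<DataBuffer> body = Flux.just(
                    factory.wrap("foo".getBytes(StandardCharsets.UTF_8)),
                    factory.wrap("bar".getBytes(StandardCharsets.UTF_8)),
                    factory.wrap("baz".getBytes(StandardCharsets.UTF_8)));

            // aggregate the whole stream into a single buffer
            Mono<DataBuffer> joined = DataBufferUtils.join(body);

            DataBuffer result = joined.block(Duration.ofSeconds(5));
            byte[] bytes = new byte[result.readableByteCount()];
            result.read(bytes);
            DataBufferUtils.release(result);
            System.out.println(new String(bytes, StandardCharsets.UTF_8));  // prints "foobarbaz"
        }
    }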
View File

@@ -109,15 +109,18 @@ public class DefaultDataBufferFactory implements DataBufferFactory {
* in {@code dataBuffers}.
*/
@Override
- public DataBuffer compose(List<DataBuffer> dataBuffers) {
+ public DataBuffer join(List<? extends DataBuffer> dataBuffers) {
Assert.notEmpty(dataBuffers, "'dataBuffers' must not be empty");
int capacity = dataBuffers.stream()
.mapToInt(DataBuffer::readableByteCount)
.sum();
DefaultDataBuffer dataBuffer = allocateBuffer(capacity);
- return dataBuffers.stream()
+ DataBuffer result = dataBuffers.stream()
+ .map(o -> (DataBuffer) o)
.reduce(dataBuffer, DataBuffer::write);
+ dataBuffers.forEach(DataBufferUtils::release);
+ return result;
}
@Override

View File

@@ -87,7 +87,7 @@ public class NettyDataBufferFactory implements DataBufferFactory {
* <p>This implementation uses Netty's {@link CompositeByteBuf}.
*/
@Override
- public DataBuffer compose(List<DataBuffer> dataBuffers) {
+ public DataBuffer join(List<? extends DataBuffer> dataBuffers) {
Assert.notNull(dataBuffers, "'dataBuffers' must not be null");
CompositeByteBuf composite = this.byteBufAllocator.compositeBuffer(dataBuffers.size());
for (DataBuffer dataBuffer : dataBuffers) {

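By contrast with the copying DefaultDataBufferFactory above, the Netty factory joins by building a CompositeByteBuf that references the input buffers, which are released together with the returned composite. A hypothetical sketch (not part of this commit; ByteBufAllocator.DEFAULT and the class name NettyJoinExample are assumptions, the rest is taken from this diff):

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    import io.netty.buffer.ByteBufAllocator;

    import org.springframework.core.io.buffer.DataBuffer;
    import org.springframework.core.io.buffer.DataBufferUtils;
    import org.springframework.core.io.buffer.NettyDataBufferFactory;

    public class NettyJoinExample {

        public static void main(String[] args) {
            NettyDataBufferFactory factory = new NettyDataBufferFactory(ByteBufAllocator.DEFAULT);

            // the composite holds references to the wrapped buffers; the data is not copied
            DataBuffer joined = factory.join(Arrays.asList(
                    factory.wrap("foo".getBytes(StandardCharsets.UTF_8)),
                    factory.wrap("bar".getBytes(StandardCharsets.UTF_8))));

            System.out.println(joined.readableByteCount());  // prints 6

            // releasing the composite also releases the joined components
            DataBufferUtils.release(joined);
        }
    }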
View File

@@ -480,8 +480,8 @@ public class DataBufferTests extends AbstractDataBufferAllocatingTestCase {
}
@Test
- public void composite() {
- DataBuffer composite = this.bufferFactory.compose(Arrays.asList(stringBuffer("a"),
+ public void join() {
+ DataBuffer composite = this.bufferFactory.join(Arrays.asList(stringBuffer("a"),
stringBuffer("b"), stringBuffer("c")));
assertEquals(3, composite.readableByteCount());
byte[] bytes = new byte[3];

View File

@@ -322,13 +322,13 @@ public class DataBufferUtilsTests extends AbstractDataBufferAllocatingTestCase {
}
@Test
- public void compose() {
+ public void join() {
DataBuffer foo = stringBuffer("foo");
DataBuffer bar = stringBuffer("bar");
DataBuffer baz = stringBuffer("baz");
Flux<DataBuffer> flux = Flux.just(foo, bar, baz);
- DataBuffer result = DataBufferUtils.compose(flux).block(Duration.ofSeconds(5));
+ DataBuffer result = DataBufferUtils.join(flux).block(Duration.ofSeconds(5));
assertEquals("foobarbaz", DataBufferTestUtils.dumpString(result, StandardCharsets.UTF_8));

View File

@@ -96,7 +96,7 @@ public class FormHttpMessageReader implements HttpMessageReader<MultiValueMap<St
MediaType contentType = message.getHeaders().getContentType();
Charset charset = getMediaTypeCharset(contentType);
- return DataBufferUtils.compose(message.getBody())
+ return DataBufferUtils.join(message.getBody())
.map(buffer -> {
CharBuffer charBuffer = charset.decode(buffer.asByteBuffer());
String body = charBuffer.toString();

View File

@@ -103,7 +103,7 @@ public class XmlEventDecoder extends AbstractDecoder<XMLEvent> {
.doFinally(signalType -> aaltoMapper.endOfInput());
}
else {
- Mono<DataBuffer> singleBuffer = DataBufferUtils.compose(flux);
+ Mono<DataBuffer> singleBuffer = DataBufferUtils.join(flux);
return singleBuffer.
flatMapMany(dataBuffer -> {
try {

View File

@@ -89,7 +89,7 @@ public class SynchronossPartHttpMessageReaderTests {
assertTrue(part instanceof FilePart);
assertEquals("fooPart", part.name());
assertEquals("foo.txt", ((FilePart) part).filename());
- DataBuffer buffer = DataBufferUtils.compose(part.content()).block();
+ DataBuffer buffer = DataBufferUtils.join(part.content()).block();
assertEquals(12, buffer.readableByteCount());
byte[] byteContent = new byte[12];
buffer.read(byteContent);

View File

@@ -103,7 +103,7 @@ public class MultipartIntegrationTests extends AbstractHttpHandlerIntegrationTes
assertEquals("fooPart", part.name());
assertTrue(part instanceof FilePart);
assertEquals("foo.txt", ((FilePart) part).filename());
- DataBuffer buffer = DataBufferUtils.compose(part.content()).block();
+ DataBuffer buffer = DataBufferUtils.join(part.content()).block();
assertEquals(12, buffer.readableByteCount());
byte[] byteContent = new byte[12];
buffer.read(byteContent);

View File

@@ -457,7 +457,7 @@ class DefaultWebClient implements WebClient {
private static Mono<WebClientResponseException> createResponseException(ClientResponse response) {
- return DataBufferUtils.compose(response.body(BodyExtractors.toDataBuffers()))
+ return DataBufferUtils.join(response.body(BodyExtractors.toDataBuffers()))
.map(dataBuffer -> {
byte[] bytes = new byte[dataBuffer.readableByteCount()];
dataBuffer.read(bytes);

View File

@@ -112,7 +112,7 @@ public class AppCacheManifestTransformer extends ResourceTransformerSupport {
DataBufferFactory bufferFactory = exchange.getResponse().bufferFactory();
Flux<DataBuffer> flux = DataBufferUtils
.read(outputResource, bufferFactory, StreamUtils.BUFFER_SIZE);
- return DataBufferUtils.compose(flux)
+ return DataBufferUtils.join(flux)
.flatMap(dataBuffer -> {
CharBuffer charBuffer = DEFAULT_CHARSET.decode(dataBuffer.asByteBuffer());
DataBufferUtils.release(dataBuffer);

View File

@@ -46,7 +46,7 @@ public class ContentVersionStrategy extends AbstractFileNameVersionStrategy {
public Mono<String> getResourceVersion(Resource resource) {
Flux<DataBuffer> flux =
DataBufferUtils.read(resource, dataBufferFactory, StreamUtils.BUFFER_SIZE);
- return DataBufferUtils.compose(flux)
+ return DataBufferUtils.join(flux)
.map(buffer -> {
byte[] result = new byte[buffer.readableByteCount()];
buffer.read(result);

View File

@@ -89,7 +89,7 @@ public class CssLinkResourceTransformer extends ResourceTransformerSupport {
DataBufferFactory bufferFactory = exchange.getResponse().bufferFactory();
Flux<DataBuffer> flux = DataBufferUtils
.read(ouptputResource, bufferFactory, StreamUtils.BUFFER_SIZE);
- return DataBufferUtils.compose(flux)
+ return DataBufferUtils.join(flux)
.flatMap(dataBuffer -> {
CharBuffer charBuffer = DEFAULT_CHARSET.decode(dataBuffer.asByteBuffer());
DataBufferUtils.release(dataBuffer);

View File

@@ -332,7 +332,7 @@ public class BodyInsertersTests {
Mono<Void> result = inserter.insert(request, this.context);
StepVerifier.create(result).expectComplete().verify();
- StepVerifier.create(DataBufferUtils.compose(request.getBody()))
+ StepVerifier.create(DataBufferUtils.join(request.getBody()))
.consumeNextWith(dataBuffer -> {
byte[] resultBytes = new byte[dataBuffer.readableByteCount()];
dataBuffer.read(resultBytes);