Honour ObjectMapper feature in Jackson2Tokenizer
After this commit, Jackson2Tokenizer honours the ObjectMapper's DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS setting when creating TokenBuffers, so floating point values are buffered as BigDecimal when that feature is enabled. Closes gh-24479.
This commit is contained in:
parent
c648425822
commit
45555f77a6
|
|
@ -1,5 +1,5 @@
|
|||
/*
|
||||
* Copyright 2002-2019 the original author or authors.
|
||||
* Copyright 2002-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
|
@ -27,6 +27,7 @@ import com.fasterxml.jackson.core.JsonProcessingException;
|
|||
import com.fasterxml.jackson.core.JsonToken;
|
||||
import com.fasterxml.jackson.core.async.ByteArrayFeeder;
|
||||
import com.fasterxml.jackson.databind.DeserializationContext;
|
||||
import com.fasterxml.jackson.databind.DeserializationFeature;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.fasterxml.jackson.databind.deser.DefaultDeserializationContext;
|
||||
import com.fasterxml.jackson.databind.util.TokenBuffer;
|
||||
|
|
@ -37,6 +38,7 @@ import org.springframework.core.codec.DecodingException;
|
|||
import org.springframework.core.io.buffer.DataBuffer;
|
||||
import org.springframework.core.io.buffer.DataBufferLimitException;
|
||||
import org.springframework.core.io.buffer.DataBufferUtils;
|
||||
import org.springframework.lang.Nullable;
|
||||
|
||||
/**
|
||||
* {@link Function} to transform a JSON stream of arbitrary size, byte array
|
||||
|
|
@ -56,16 +58,19 @@ final class Jackson2Tokenizer {
|
|||
|
||||
private final boolean tokenizeArrayElements;
|
||||
|
||||
private TokenBuffer tokenBuffer;
|
||||
private final boolean forceUseOfBigDecimal;
|
||||
|
||||
private final int maxInMemorySize;
|
||||
|
||||
private int objectDepth;
|
||||
|
||||
private int arrayDepth;
|
||||
|
||||
private final int maxInMemorySize;
|
||||
|
||||
private int byteCount;
|
||||
|
||||
@Nullable // effectively non-null: initialized via createToken() in the constructor
|
||||
private TokenBuffer tokenBuffer;
|
||||
|
||||
|
||||
// TODO: change to ByteBufferFeeder when supported by Jackson
|
||||
// See https://github.com/FasterXML/jackson-core/issues/478
|
||||
|
|
@ -73,17 +78,19 @@ final class Jackson2Tokenizer {
|
|||
|
||||
|
||||
/**
 * Create a tokenizer that feeds the given non-blocking parser and collects
 * emitted tokens into {@code TokenBuffer}s.
 * @param parser the non-blocking JSON parser; must expose a {@link ByteArrayFeeder}
 * @param deserializationContext the context used when creating {@code TokenBuffer}s
 * @param tokenizeArrayElements whether each element of a top-level array should
 * be emitted as a separate buffer
 * @param forceUseOfBigDecimal whether buffered floating point values should be
 * represented as {@code BigDecimal} (mirrors USE_BIG_DECIMAL_FOR_FLOATS)
 * @param maxInMemorySize the byte-count limit enforced while buffering
 */
private Jackson2Tokenizer(JsonParser parser, DeserializationContext deserializationContext,
		boolean tokenizeArrayElements, boolean forceUseOfBigDecimal, int maxInMemorySize) {

	this.parser = parser;
	this.deserializationContext = deserializationContext;
	this.tokenizeArrayElements = tokenizeArrayElements;
	this.forceUseOfBigDecimal = forceUseOfBigDecimal;
	this.maxInMemorySize = maxInMemorySize;
	// The parser is created in byte-array (async) mode, so this cast is safe here.
	this.inputFeeder = (ByteArrayFeeder) this.parser.getNonBlockingInputFeeder();
	// Eagerly set up the first TokenBuffer with the BigDecimal preference applied.
	createToken();
}
|
||||
|
||||
|
||||
|
||||
private List<TokenBuffer> tokenize(DataBuffer dataBuffer) {
|
||||
int bufferSize = dataBuffer.readableByteCount();
|
||||
byte[] bytes = new byte[bufferSize];
|
||||
|
|
@ -134,6 +141,9 @@ final class Jackson2Tokenizer {
|
|||
previousNull = true;
|
||||
continue;
|
||||
}
|
||||
else {
|
||||
previousNull = false;
|
||||
}
|
||||
updateDepth(token);
|
||||
if (!this.tokenizeArrayElements) {
|
||||
processTokenNormal(token, result);
|
||||
|
|
@ -167,7 +177,7 @@ final class Jackson2Tokenizer {
|
|||
|
||||
if ((token.isStructEnd() || token.isScalarValue()) && this.objectDepth == 0 && this.arrayDepth == 0) {
|
||||
result.add(this.tokenBuffer);
|
||||
this.tokenBuffer = new TokenBuffer(this.parser, this.deserializationContext);
|
||||
createToken();
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -180,10 +190,15 @@ final class Jackson2Tokenizer {
|
|||
if (this.objectDepth == 0 && (this.arrayDepth == 0 || this.arrayDepth == 1) &&
|
||||
(token == JsonToken.END_OBJECT || token.isScalarValue())) {
|
||||
result.add(this.tokenBuffer);
|
||||
this.tokenBuffer = new TokenBuffer(this.parser, this.deserializationContext);
|
||||
createToken();
|
||||
}
|
||||
}
|
||||
|
||||
private void createToken() {
|
||||
this.tokenBuffer = new TokenBuffer(this.parser, this.deserializationContext);
|
||||
this.tokenBuffer.forceUseOfBigDecimal(this.forceUseOfBigDecimal);
|
||||
}
|
||||
|
||||
private boolean isTopLevelArrayToken(JsonToken token) {
|
||||
return this.objectDepth == 0 && ((token == JsonToken.START_ARRAY && this.arrayDepth == 1) ||
|
||||
(token == JsonToken.END_ARRAY && this.arrayDepth == 0));
|
||||
|
|
@ -231,7 +246,9 @@ final class Jackson2Tokenizer {
|
|||
context = ((DefaultDeserializationContext) context).createInstance(
|
||||
objectMapper.getDeserializationConfig(), parser, objectMapper.getInjectableValues());
|
||||
}
|
||||
Jackson2Tokenizer tokenizer = new Jackson2Tokenizer(parser, context, tokenizeArrays, maxInMemorySize);
|
||||
boolean forceUseOfBigDecimal = objectMapper.isEnabled(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS);
|
||||
Jackson2Tokenizer tokenizer = new Jackson2Tokenizer(parser, context, tokenizeArrays, forceUseOfBigDecimal,
|
||||
maxInMemorySize);
|
||||
return dataBuffers.concatMapIterable(tokenizer::tokenize).concatWith(tokenizer.endOfInput());
|
||||
}
|
||||
catch (IOException ex) {
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
/*
|
||||
* Copyright 2002-2019 the original author or authors.
|
||||
* Copyright 2002-2020 the original author or authors.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
|
|
@ -22,12 +22,17 @@ import java.nio.charset.StandardCharsets;
|
|||
import java.util.List;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonFactory;
|
||||
import com.fasterxml.jackson.core.JsonParser;
|
||||
import com.fasterxml.jackson.core.JsonToken;
|
||||
import com.fasterxml.jackson.core.TreeNode;
|
||||
import com.fasterxml.jackson.databind.DeserializationFeature;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.fasterxml.jackson.databind.util.TokenBuffer;
|
||||
import org.json.JSONException;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.ValueSource;
|
||||
import org.skyscreamer.jsonassert.JSONAssert;
|
||||
import reactor.core.publisher.Flux;
|
||||
import reactor.test.StepVerifier;
|
||||
|
|
@ -39,6 +44,8 @@ import org.springframework.core.testfixture.io.buffer.AbstractLeakCheckingTests;
|
|||
|
||||
import static java.util.Arrays.asList;
|
||||
import static java.util.Collections.singletonList;
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
|
||||
/**
|
||||
* @author Arjen Poutsma
|
||||
|
|
@ -259,6 +266,34 @@ public class Jackson2TokenizerTests extends AbstractLeakCheckingTests {
|
|||
.verify();
|
||||
}
|
||||
|
||||
@ParameterizedTest
|
||||
@ValueSource(booleans = {false, true})
|
||||
public void useBigDecimalForFloats(boolean useBigDecimalForFloats) {
|
||||
this.objectMapper.configure(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS, useBigDecimalForFloats);
|
||||
|
||||
Flux<DataBuffer> source = Flux.just(stringBuffer("1E+2"));
|
||||
Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(source, this.jsonFactory, this.objectMapper, false, -1);
|
||||
|
||||
StepVerifier.create(tokens)
|
||||
.assertNext(tokenBuffer -> {
|
||||
try {
|
||||
JsonParser parser = tokenBuffer.asParser();
|
||||
JsonToken token = parser.nextToken();
|
||||
assertThat(token).isEqualTo(JsonToken.VALUE_NUMBER_FLOAT);
|
||||
JsonParser.NumberType numberType = parser.getNumberType();
|
||||
if (useBigDecimalForFloats) {
|
||||
assertThat(numberType).isEqualTo(JsonParser.NumberType.BIG_DECIMAL);
|
||||
}
|
||||
else {
|
||||
assertThat(numberType).isEqualTo(JsonParser.NumberType.DOUBLE);
|
||||
}
|
||||
}
|
||||
catch (IOException ex) {
|
||||
fail(ex);
|
||||
}
|
||||
})
|
||||
.verifyComplete();
|
||||
}
|
||||
|
||||
private Flux<String> decode(List<String> source, boolean tokenize, int maxInMemorySize) {
|
||||
|
||||
|
|
|
|||
Loading…
Reference in New Issue