Jackson2Tokenizer unwraps top-level array only
When the input stream is a JSON array the tokenizer skips over the start and end array tokens in order to decode into an array of top-level objects. However in this mode it also skips over nested start and end array tokens which prevents proper parsing of JSR-310 dates (represented with an array syntax) as well as any fields that are collections. Issue: SPR-15803
This commit is contained in:
parent
c3e3df57f8
commit
bd0de7086e
|
|
@ -140,7 +140,7 @@ class Jackson2Tokenizer implements Function<DataBuffer, Flux<TokenBuffer>> {
|
|||
}
|
||||
|
||||
private void processTokenArray(JsonToken token, List<TokenBuffer> result) throws IOException {
-	if (token != JsonToken.START_ARRAY && token != JsonToken.END_ARRAY) {
+	if (!isTopLevelArrayToken(token)) {
		this.tokenBuffer.copyCurrentEvent(this.parser);
	}
|
||||
|
||||
|
|
@ -151,6 +151,11 @@ class Jackson2Tokenizer implements Function<DataBuffer, Flux<TokenBuffer>> {
|
|||
}
|
||||
}
|
||||
|
||||
private boolean isTopLevelArrayToken(JsonToken token) {
|
||||
return (token == JsonToken.START_ARRAY && this.arrayDepth == 1) ||
|
||||
(token == JsonToken.END_ARRAY && this.arrayDepth == 0);
|
||||
}
|
||||
|
||||
/**
 * Signals that no further input will be fed to this tokenizer.
 * Delegates to the underlying async parser's input feeder
 * (presumably Jackson's non-blocking {@code ByteArrayFeeder} —
 * confirm against the field declaration).
 */
public void endOfInput() {
	this.inputFeeder.endOfInput();
}
|
||||
|
|
|
|||
|
|
@ -116,6 +116,19 @@ public class Jackson2TokenizerTests extends AbstractDataBufferAllocatingTestCase
|
|||
				asList("{\"foo\": \"bar\"}",
						"{\"foo\": \"baz\"}"));

		// SPR-15803: nested arrays (collection fields, JSR-310 dates in
		// array form) must survive top-level array unwrapping intact
		testTokenize(
				singletonList("[" +
						"{\"id\":\"0\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}," +
						"{\"id\":\"1\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}," +
						"{\"id\":\"2\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}" +
						"]"),
				asList(
						"{\"id\":\"0\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}",
						"{\"id\":\"1\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}",
						"{\"id\":\"2\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}")
		);

		testTokenize(
				asList("[{\"foo\": \"foofoo\", \"bar\"",
						": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
|
||||
|
|
|
|||
Loading…
Reference in New Issue