Jackson2JsonTokenizer unwraps top level array only

When the input stream is a JSON array the tokenizer skips over the
start and end array tokens in order to decode into an array of
top-level objects. However, in this mode it also skips over nested
start and end array tokens, which prevents proper parsing of JSR-310
dates (represented with an array syntax) as well as any fields that
are collections.

Issue: SPR-15803
This commit is contained in:
Rossen Stoyanchev 2017-07-21 12:41:20 +02:00
parent c3e3df57f8
commit bd0de7086e
2 changed files with 19 additions and 1 deletions

View File

@ -140,7 +140,7 @@ class Jackson2Tokenizer implements Function<DataBuffer, Flux<TokenBuffer>> {
} }
private void processTokenArray(JsonToken token, List<TokenBuffer> result) throws IOException { private void processTokenArray(JsonToken token, List<TokenBuffer> result) throws IOException {
if (token != JsonToken.START_ARRAY && token != JsonToken.END_ARRAY) { if (!isTopLevelArrayToken(token)) {
this.tokenBuffer.copyCurrentEvent(this.parser); this.tokenBuffer.copyCurrentEvent(this.parser);
} }
@ -151,6 +151,11 @@ class Jackson2Tokenizer implements Function<DataBuffer, Flux<TokenBuffer>> {
} }
} }
/**
 * Whether the given token opens or closes the top-level JSON array being
 * unwrapped, as opposed to a nested array inside one of its elements
 * (e.g. a JSR-310 date serialized as {@code [yyyy,MM,dd]} or a collection field).
 * <p>Depth accounting: {@code arrayDepth} has already been incremented when a
 * START_ARRAY is seen (so the outermost open is depth 1) and already
 * decremented when an END_ARRAY is seen (so the outermost close is depth 0).
 */
private boolean isTopLevelArrayToken(JsonToken token) {
	if (token == JsonToken.START_ARRAY) {
		return this.arrayDepth == 1;
	}
	if (token == JsonToken.END_ARRAY) {
		return this.arrayDepth == 0;
	}
	return false;
}
public void endOfInput() { public void endOfInput() {
this.inputFeeder.endOfInput(); this.inputFeeder.endOfInput();
} }

View File

@ -116,6 +116,19 @@ public class Jackson2TokenizerTests extends AbstractDataBufferAllocatingTestCase
asList("{\"foo\": \"bar\"}", asList("{\"foo\": \"bar\"}",
"{\"foo\": \"baz\"}")); "{\"foo\": \"baz\"}"));
// SPR-15803
testTokenize(
singletonList("[" +
"{\"id\":\"0\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}," +
"{\"id\":\"1\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}," +
"{\"id\":\"2\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}" +
"]"),
asList(
"{\"id\":\"0\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}",
"{\"id\":\"1\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}",
"{\"id\":\"2\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}")
);
testTokenize( testTokenize(
asList("[{\"foo\": \"foofoo\", \"bar\"", asList("[{\"foo\": \"foofoo\", \"bar\"",
": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"), ": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),