Commit f6693e79 authored by Juergen Hoeller

Jackson2Tokenizer passes DeserializationContext into TokenBuffer

Closes gh-22510
Parent 6c599208
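What the change does: each TokenBuffer the tokenizer emits is now created with the ObjectMapper's DeserializationContext, so events can be buffered with the mapper's deserialization config in view (for example DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS, the kind of setting behind gh-22510) instead of with bare defaults. Below is a minimal standalone sketch of the TokenBuffer mechanism being wired up here; the JSON input is made up, and forceUseOfBigDecimal(true) is called explicitly only to make the effect observable outside a real deserialization pass:

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.util.TokenBuffer;

public class TokenBufferSketch {

	public static void main(String[] args) throws Exception {
		ObjectMapper mapper = new ObjectMapper();
		try (JsonParser parser = mapper.getFactory().createParser("{\"price\": 1.05}")) {
			// The commit passes the mapper's DeserializationContext into this constructor
			TokenBuffer buffer = new TokenBuffer(parser, mapper.getDeserializationContext());
			buffer.forceUseOfBigDecimal(true);  // stand-in for config the context can carry

			// Copy events one by one, as Jackson2Tokenizer does via copyCurrentEvent(..)
			while (parser.nextToken() != null) {
				buffer.copyCurrentEvent(parser);
			}

			// Replay the buffered tokens: the float value was stored as a BigDecimal
			JsonParser replay = buffer.asParser();
			replay.nextToken();  // START_OBJECT
			replay.nextToken();  // FIELD_NAME "price"
			replay.nextToken();  // VALUE_NUMBER_FLOAT
			System.out.println(replay.getNumberType());  // BIG_DECIMAL
		}
	}
}

Whether the two-argument constructor alone picks the feature up depends on the supplied context instance actually carrying the mapper's active config; the explicit flag above sidesteps that question for the demo.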
@@ -86,7 +86,8 @@ public abstract class AbstractJackson2Decoder extends Jackson2CodecSupport imple
 	public Flux<Object> decode(Publisher<DataBuffer> input, ResolvableType elementType,
 			@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
-		Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(Flux.from(input), this.jsonFactory, true);
+		Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(
+				Flux.from(input), this.jsonFactory, getObjectMapper().getDeserializationContext(), true);
 		return decodeInternal(tokens, elementType, mimeType, hints);
 	}
@@ -94,7 +95,8 @@ public abstract class AbstractJackson2Decoder extends Jackson2CodecSupport imple
 	public Mono<Object> decodeToMono(Publisher<DataBuffer> input, ResolvableType elementType,
 			@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
-		Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(Flux.from(input), this.jsonFactory, false);
+		Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(
+				Flux.from(input), this.jsonFactory, getObjectMapper().getDeserializationContext(), false);
 		return decodeInternal(tokens, elementType, mimeType, hints).singleOrEmpty();
 	}
 /*
- * Copyright 2002-2018 the original author or authors.
+ * Copyright 2002-2019 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -26,13 +26,13 @@ import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.core.JsonToken;
 import com.fasterxml.jackson.core.async.ByteArrayFeeder;
+import com.fasterxml.jackson.databind.DeserializationContext;
 import com.fasterxml.jackson.databind.util.TokenBuffer;
 import reactor.core.publisher.Flux;
 import org.springframework.core.codec.DecodingException;
 import org.springframework.core.io.buffer.DataBuffer;
 import org.springframework.core.io.buffer.DataBufferUtils;
-import org.springframework.util.Assert;
 /**
  * {@link Function} to transform a JSON stream of arbitrary size, byte array
@@ -40,6 +40,8 @@ import org.springframework.util.Assert;
  * well-formed JSON object.
  *
  * @author Arjen Poutsma
+ * @author Rossen Stoyanchev
+ * @author Juergen Hoeller
  * @since 5.0
  */
 final class Jackson2Tokenizer {
@@ -59,36 +61,15 @@ final class Jackson2Tokenizer {
 	private final ByteArrayFeeder inputFeeder;
-	private Jackson2Tokenizer(JsonParser parser, boolean tokenizeArrayElements) {
-		Assert.notNull(parser, "'parser' must not be null");
+	private Jackson2Tokenizer(
+			JsonParser parser, DeserializationContext deserializationContext, boolean tokenizeArrayElements) {
 		this.parser = parser;
 		this.tokenizeArrayElements = tokenizeArrayElements;
-		this.tokenBuffer = new TokenBuffer(parser);
+		this.tokenBuffer = new TokenBuffer(parser, deserializationContext);
 		this.inputFeeder = (ByteArrayFeeder) this.parser.getNonBlockingInputFeeder();
 	}
-	/**
-	 * Tokenize the given {@code Flux<DataBuffer>} into {@code Flux<TokenBuffer>}.
-	 * @param dataBuffers the source data buffers
-	 * @param jsonFactory the factory to use
-	 * @param tokenizeArrayElements if {@code true} and the "top level" JSON
-	 * object is an array, each element is returned individually, immediately
-	 * after it is received.
-	 * @return the result token buffers
-	 */
-	public static Flux<TokenBuffer> tokenize(Flux<DataBuffer> dataBuffers, JsonFactory jsonFactory,
-			boolean tokenizeArrayElements) {
-		try {
-			JsonParser parser = jsonFactory.createNonBlockingByteArrayParser();
-			Jackson2Tokenizer tokenizer = new Jackson2Tokenizer(parser, tokenizeArrayElements);
-			return dataBuffers.flatMap(tokenizer::tokenize, Flux::error, tokenizer::endOfInput);
-		}
-		catch (IOException ex) {
-			return Flux.error(ex);
-		}
-	}
 	private Flux<TokenBuffer> tokenize(DataBuffer dataBuffer) {
 		byte[] bytes = new byte[dataBuffer.readableByteCount()];
@@ -100,8 +81,7 @@
 			return parseTokenBufferFlux();
 		}
 		catch (JsonProcessingException ex) {
-			return Flux.error(new DecodingException(
-					"JSON decoding error: " + ex.getOriginalMessage(), ex));
+			return Flux.error(new DecodingException("JSON decoding error: " + ex.getOriginalMessage(), ex));
 		}
 		catch (IOException ex) {
 			return Flux.error(ex);
@@ -114,8 +94,7 @@
 			return parseTokenBufferFlux();
 		}
 		catch (JsonProcessingException ex) {
-			return Flux.error(new DecodingException(
-					"JSON decoding error: " + ex.getOriginalMessage(), ex));
+			return Flux.error(new DecodingException("JSON decoding error: " + ex.getOriginalMessage(), ex));
 		}
 		catch (IOException ex) {
 			return Flux.error(ex);
@@ -128,12 +107,11 @@
 		while (true) {
 			JsonToken token = this.parser.nextToken();
 			// SPR-16151: Smile data format uses null to separate documents
-			if ((token == JsonToken.NOT_AVAILABLE) ||
+			if (token == JsonToken.NOT_AVAILABLE ||
 					(token == null && (token = this.parser.nextToken()) == null)) {
 				break;
 			}
 			updateDepth(token);
 			if (!this.tokenizeArrayElements) {
 				processTokenNormal(token, result);
 			}
@@ -164,8 +142,7 @@
 	private void processTokenNormal(JsonToken token, List<TokenBuffer> result) throws IOException {
 		this.tokenBuffer.copyCurrentEvent(this.parser);
-		if ((token.isStructEnd() || token.isScalarValue()) &&
-				this.objectDepth == 0 && this.arrayDepth == 0) {
+		if ((token.isStructEnd() || token.isScalarValue()) && this.objectDepth == 0 && this.arrayDepth == 0) {
 			result.add(this.tokenBuffer);
 			this.tokenBuffer = new TokenBuffer(this.parser);
 		}
@@ -177,8 +154,7 @@
 			this.tokenBuffer.copyCurrentEvent(this.parser);
 		}
-		if (this.objectDepth == 0 &&
-				(this.arrayDepth == 0 || this.arrayDepth == 1) &&
+		if (this.objectDepth == 0 && (this.arrayDepth == 0 || this.arrayDepth == 1) &&
 				(token == JsonToken.END_OBJECT || token.isScalarValue())) {
 			result.add(this.tokenBuffer);
 			this.tokenBuffer = new TokenBuffer(this.parser);
@@ -190,4 +166,26 @@
 				(token == JsonToken.END_ARRAY && this.arrayDepth == 0));
 	}
+	/**
+	 * Tokenize the given {@code Flux<DataBuffer>} into {@code Flux<TokenBuffer>}.
+	 * @param dataBuffers the source data buffers
+	 * @param jsonFactory the factory to use
+	 * @param tokenizeArrayElements if {@code true} and the "top level" JSON object is
+	 * an array, each element is returned individually immediately after it is received
+	 * @return the resulting token buffers
+	 */
+	public static Flux<TokenBuffer> tokenize(Flux<DataBuffer> dataBuffers, JsonFactory jsonFactory,
+			DeserializationContext deserializationContext, boolean tokenizeArrayElements) {
+		try {
+			JsonParser parser = jsonFactory.createNonBlockingByteArrayParser();
+			Jackson2Tokenizer tokenizer = new Jackson2Tokenizer(parser, deserializationContext, tokenizeArrayElements);
+			return dataBuffers.flatMap(tokenizer::tokenize, Flux::error, tokenizer::endOfInput);
+		}
+		catch (IOException ex) {
+			return Flux.error(ex);
+		}
+	}
 }
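With the method moved to the bottom of the class, tokenize(..) remains the sole entry point, now taking the extra DeserializationContext argument. A usage sketch under the new signature; a real caller would have to sit in the same package since the class itself is package-private, and the array payload is illustrative:

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Map;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.util.TokenBuffer;
import reactor.core.publisher.Flux;

import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DefaultDataBufferFactory;

public class TokenizeUsageSketch {

	public static void main(String[] args) {
		ObjectMapper mapper = new ObjectMapper();
		DataBuffer buffer = new DefaultDataBufferFactory()
				.wrap("[{\"a\":1},{\"b\":2}]".getBytes(StandardCharsets.UTF_8));

		Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(
				Flux.just(buffer), mapper.getFactory(), mapper.getDeserializationContext(), true);

		// tokenizeArrayElements=true: one TokenBuffer per top-level array element
		tokens.map(tokenBuffer -> {
			try {
				return mapper.readValue(tokenBuffer.asParser(mapper), Map.class);
			}
			catch (IOException ex) {
				throw new IllegalStateException(ex);
			}
		}).subscribe(System.out::println);  // prints {a=1} then {b=2}
	}
}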
 /*
- * Copyright 2002-2018 the original author or authors.
+ * Copyright 2002-2019 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -37,19 +37,20 @@ import org.springframework.core.codec.DecodingException;
 import org.springframework.core.io.buffer.AbstractLeakCheckingTestCase;
 import org.springframework.core.io.buffer.DataBuffer;
-import static java.util.Arrays.asList;
-import static java.util.Collections.singletonList;
+import static java.util.Arrays.*;
+import static java.util.Collections.*;
 /**
  * @author Arjen Poutsma
  * @author Rossen Stoyanchev
+ * @author Juergen Hoeller
  */
 public class Jackson2TokenizerTests extends AbstractLeakCheckingTestCase {
-	private ObjectMapper objectMapper;
 	private JsonFactory jsonFactory;
+	private ObjectMapper objectMapper;
 	@Before
 	public void createParser() {
@@ -57,6 +58,7 @@ public class Jackson2TokenizerTests extends AbstractLeakCheckingTestCase {
 		this.objectMapper = new ObjectMapper(this.jsonFactory);
 	}
 	@Test
 	public void doNotTokenizeArrayElements() {
 		testTokenize(
@@ -185,7 +187,8 @@ public class Jackson2TokenizerTests extends AbstractLeakCheckingTestCase {
 		Flux<DataBuffer> source = Flux.just(buffer)
 				.concatWith(Flux.error(new RuntimeException()));
-		Flux<TokenBuffer> result = Jackson2Tokenizer.tokenize(source, this.jsonFactory, true);
+		Flux<TokenBuffer> result = Jackson2Tokenizer.tokenize(
+				source, this.jsonFactory, this.objectMapper.getDeserializationContext(), true);
 		StepVerifier.create(result)
 				.expectError(RuntimeException.class)
@@ -195,7 +198,8 @@ public class Jackson2TokenizerTests extends AbstractLeakCheckingTestCase {
 	@Test // SPR-16521
 	public void jsonEOFExceptionIsWrappedAsDecodingError() {
 		Flux<DataBuffer> source = Flux.just(stringBuffer("{\"status\": \"noClosingQuote}"));
-		Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(source, this.jsonFactory, false);
+		Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(
+				source, this.jsonFactory, this.objectMapper.getDeserializationContext(), false);
 		StepVerifier.create(tokens)
 				.expectError(DecodingException.class)
@@ -204,10 +208,9 @@ public class Jackson2TokenizerTests extends AbstractLeakCheckingTestCase {
 	private void testTokenize(List<String> source, List<String> expected, boolean tokenizeArrayElements) {
 		Flux<TokenBuffer> tokenBufferFlux = Jackson2Tokenizer.tokenize(
 				Flux.fromIterable(source).map(this::stringBuffer),
-				this.jsonFactory,
+				this.jsonFactory, this.objectMapper.getDeserializationContext(),
 				tokenizeArrayElements);
 		Flux<String> result = tokenBufferFlux
@@ -234,7 +237,6 @@ public class Jackson2TokenizerTests extends AbstractLeakCheckingTestCase {
 	}
 	private static class JSONAssertConsumer implements Consumer<String> {
 		private final String expected;
@@ -253,4 +255,5 @@ public class Jackson2TokenizerTests extends AbstractLeakCheckingTestCase {
 			}
 		}
 	}
 }
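For context, the testTokenize(..) helper above feeds JSON strings through the tokenizer and compares the buffers written back out against the expected JSON. A representative pair of cases in the style of this suite (illustrative values, not lines from the diff) shows what the tokenizeArrayElements flag controls:

// tokenizeArrayElements=false: the whole array comes back as one token buffer
testTokenize(
		singletonList("[{\"foo\": \"bar\"},{\"foo\": \"baz\"}]"),
		singletonList("[{\"foo\": \"bar\"},{\"foo\": \"baz\"}]"),
		false);

// tokenizeArrayElements=true: each top-level array element becomes its own buffer
testTokenize(
		singletonList("[{\"foo\": \"bar\"},{\"foo\": \"baz\"}]"),
		asList("{\"foo\": \"bar\"}", "{\"foo\": \"baz\"}"),
		true);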