提交 6959e66b 编写于 作者: R Rossen Stoyanchev

JsonEOFException is wrapped as DecodingException

Jackson2Tokenizer now also wraps JsonProcessingExceptions on
endOfInput(), as it already does in tokenize(DataBuffer). This ensures
such errors are treated as client errors (HTTP 400) rather than
server errors (HTTP 500).

Issue: SPR-16521
上级 a6d527e5
......@@ -27,7 +27,6 @@ import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.core.async.ByteArrayFeeder;
import com.fasterxml.jackson.databind.util.TokenBuffer;
import org.jetbrains.annotations.NotNull;
import reactor.core.publisher.Flux;
import org.springframework.core.codec.DecodingException;
......@@ -114,12 +113,15 @@ class Jackson2Tokenizer {
try {
return parseTokenBufferFlux();
}
catch (JsonProcessingException ex) {
return Flux.error(new DecodingException(
"JSON decoding error: " + ex.getOriginalMessage(), ex));
}
catch (IOException ex) {
return Flux.error(ex);
}
}
@NotNull
private Flux<TokenBuffer> parseTokenBufferFlux() throws IOException {
List<TokenBuffer> result = new ArrayList<>();
......
......@@ -32,11 +32,12 @@ import org.skyscreamer.jsonassert.JSONAssert;
import reactor.core.publisher.Flux;
import reactor.test.StepVerifier;
import org.springframework.core.codec.DecodingException;
import org.springframework.core.io.buffer.AbstractDataBufferAllocatingTestCase;
import org.springframework.core.io.buffer.DataBuffer;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static java.util.Arrays.*;
import static java.util.Collections.*;
/**
* @author Arjen Poutsma
......@@ -174,6 +175,14 @@ public class Jackson2TokenizerTests extends AbstractDataBufferAllocatingTestCase
asList("1", "2", "3"), true);
}
// SPR-16521: malformed JSON (unterminated string literal, which triggers a
// JsonEOFException when the tokenizer signals endOfInput) must surface as a
// DecodingException so it is treated as a client error, not a raw IOException.
@Test
public void jsonEOFExceptionIsWrappedAsDecodingError() {
	Flux<DataBuffer> source = Flux.just(stringBuffer("{\"status\": \"noClosingQuote}"));
	Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(source, this.jsonFactory, false);
	// Use StepVerifier (already used throughout this test class) to assert the
	// error type on the reactive pipeline instead of blocking via blockLast()
	// with @Test(expected = ...).
	StepVerifier.create(tokens)
			.expectError(DecodingException.class)
			.verify();
}
private void testTokenize(List<String> source, List<String> expected, boolean tokenizeArrayElements) {
Flux<DataBuffer> sourceFlux = Flux.fromIterable(source)
.map(this::stringBuffer);
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册