@@ -1,5 +1,5 @@
 /*
- * Copyright 2002-2019 the original author or authors.
+ * Copyright 2002-2020 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -27,6 +27,7 @@
 import com.fasterxml.jackson.core.JsonToken;
 import com.fasterxml.jackson.core.async.ByteArrayFeeder;
 import com.fasterxml.jackson.databind.DeserializationContext;
+import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.deser.DefaultDeserializationContext;
 import com.fasterxml.jackson.databind.util.TokenBuffer;
@@ -36,6 +37,7 @@
 import org.springframework.core.io.buffer.DataBuffer;
 import org.springframework.core.io.buffer.DataBufferLimitException;
 import org.springframework.core.io.buffer.DataBufferUtils;
+import org.springframework.lang.Nullable;
 
 /**
  * {@link Function} to transform a JSON stream of arbitrary size, byte array
@@ -55,34 +57,39 @@ final class Jackson2Tokenizer {
 
 	private final boolean tokenizeArrayElements;
 
-	private TokenBuffer tokenBuffer;
+	private final boolean forceUseOfBigDecimal;
+
+	private final int maxInMemorySize;
 
 	private int objectDepth;
 
 	private int arrayDepth;
 
-	private final int maxInMemorySize;
-
 	private int byteCount;
 
+	@Nullable // yet initialized by calling createToken() in the constructor
+	private TokenBuffer tokenBuffer;
+
 
 	// TODO: change to ByteBufferFeeder when supported by Jackson
 	// See https://github.com/FasterXML/jackson-core/issues/478
 	private final ByteArrayFeeder inputFeeder;
 
 
 	private Jackson2Tokenizer(JsonParser parser, DeserializationContext deserializationContext,
-			boolean tokenizeArrayElements, int maxInMemorySize) {
+			boolean tokenizeArrayElements, boolean forceUseOfBigDecimal, int maxInMemorySize) {
 
 		this.parser = parser;
 		this.deserializationContext = deserializationContext;
 		this.tokenizeArrayElements = tokenizeArrayElements;
-		this.tokenBuffer = new TokenBuffer(parser, deserializationContext);
+		this.forceUseOfBigDecimal = forceUseOfBigDecimal;
 		this.inputFeeder = (ByteArrayFeeder) this.parser.getNonBlockingInputFeeder();
 		this.maxInMemorySize = maxInMemorySize;
+		createToken();
 	}
 
 
+
 	private Flux<TokenBuffer> tokenize(DataBuffer dataBuffer) {
 		int bufferSize = dataBuffer.readableByteCount();
 		byte[] bytes = new byte[dataBuffer.readableByteCount()];
@@ -132,6 +139,9 @@ else if (token == null) { // !previousNull
 				previousNull = true;
 				continue;
 			}
+			else {
+				previousNull = false;
+			}
 			updateDepth(token);
 			if (!this.tokenizeArrayElements) {
 				processTokenNormal(token, result);
@@ -165,7 +175,7 @@ private void processTokenNormal(JsonToken token, List<TokenBuffer> result) throw
 
 		if ((token.isStructEnd() || token.isScalarValue()) && this.objectDepth == 0 && this.arrayDepth == 0) {
 			result.add(this.tokenBuffer);
-			this.tokenBuffer = new TokenBuffer(this.parser, this.deserializationContext);
+			createToken();
 		}
 
 	}
@@ -178,10 +188,15 @@ private void processTokenArray(JsonToken token, List<TokenBuffer> result) throws
 		if (this.objectDepth == 0 && (this.arrayDepth == 0 || this.arrayDepth == 1) &&
 				(token == JsonToken.END_OBJECT || token.isScalarValue())) {
 			result.add(this.tokenBuffer);
-			this.tokenBuffer = new TokenBuffer(this.parser, this.deserializationContext);
+			createToken();
 		}
 	}
 
+	private void createToken() {
+		this.tokenBuffer = new TokenBuffer(this.parser, this.deserializationContext);
+		this.tokenBuffer.forceUseOfBigDecimal(this.forceUseOfBigDecimal);
+	}
+
 	private boolean isTopLevelArrayToken(JsonToken token) {
 		return this.objectDepth == 0 && ((token == JsonToken.START_ARRAY && this.arrayDepth == 1) ||
 				(token == JsonToken.END_ARRAY && this.arrayDepth == 0));
@@ -229,7 +244,9 @@ public static Flux<TokenBuffer> tokenize(Flux<DataBuffer> dataBuffers, JsonFacto
 				context = ((DefaultDeserializationContext) context).createInstance(
 						objectMapper.getDeserializationConfig(), parser, objectMapper.getInjectableValues());
 			}
-			Jackson2Tokenizer tokenizer = new Jackson2Tokenizer(parser, context, tokenizeArrays, maxInMemorySize);
+			boolean forceUseOfBigDecimal = objectMapper.isEnabled(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS);
+			Jackson2Tokenizer tokenizer = new Jackson2Tokenizer(parser, context, tokenizeArrays, forceUseOfBigDecimal,
+					maxInMemorySize);
 			return dataBuffers.flatMap(tokenizer::tokenize, Flux::error, tokenizer::endOfInput);
 		}
 		catch (IOException ex) {
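
For context, a minimal standalone sketch (not part of the commit) of what the propagated setting does: it imitates how the tokenizer copies parser events into a TokenBuffer and shows that, once forceUseOfBigDecimal is applied as in the new createToken(), floating-point numbers are buffered as BigDecimal rather than double, so an ObjectMapper's USE_BIG_DECIMAL_FOR_FLOATS configuration is no longer lost between tokenization and deserialization. The class name ForceBigDecimalSketch and the sample JSON are illustrative only.

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.util.TokenBuffer;

public class ForceBigDecimalSketch {

	public static void main(String[] args) throws Exception {
		ObjectMapper mapper = new ObjectMapper()
				.enable(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS);

		// Same derivation as in the static tokenize(..) method above.
		boolean forceUseOfBigDecimal = mapper.isEnabled(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS);

		try (JsonParser parser = mapper.getFactory().createParser("{\"price\": 1.10}")) {
			// Mirror createToken(): buffer parser events with the flag applied.
			TokenBuffer tokenBuffer = new TokenBuffer(parser);
			tokenBuffer.forceUseOfBigDecimal(forceUseOfBigDecimal);
			while (parser.nextToken() != null) {
				tokenBuffer.copyCurrentEvent(parser);
			}

			// Replay the buffered tokens: the floating-point value was captured as a
			// BigDecimal, so its scale ("1.10") is preserved; without the flag the
			// buffer would hold a double and report NumberType.DOUBLE instead.
			JsonParser replay = tokenBuffer.asParser();
			replay.nextToken();  // START_OBJECT
			replay.nextToken();  // FIELD_NAME "price"
			replay.nextToken();  // VALUE_NUMBER_FLOAT
			System.out.println(replay.getNumberType());    // BIG_DECIMAL
			System.out.println(replay.getDecimalValue());  // 1.10
		}
	}
}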