 import org.springframework.core.io.buffer.DataBuffer;
 import org.springframework.core.io.buffer.DataBufferLimitException;
 import org.springframework.core.io.buffer.DataBufferUtils;
-import org.springframework.lang.Nullable;
 
 /**
  * {@link Function} to transform a JSON stream of arbitrary size, byte array
@@ -67,7 +66,6 @@ final class Jackson2Tokenizer {
 
 	private int byteCount;
 
-	@Nullable // yet initialized by calling createToken() in the constructor
 	private TokenBuffer tokenBuffer;
 
 
@@ -85,7 +83,7 @@ private Jackson2Tokenizer(JsonParser parser, DeserializationContext deserializat
 		this.forceUseOfBigDecimal = forceUseOfBigDecimal;
 		this.inputFeeder = (ByteArrayFeeder) this.parser.getNonBlockingInputFeeder();
 		this.maxInMemorySize = maxInMemorySize;
-		createToken();
+		this.tokenBuffer = createToken();
 	}
 
 
@@ -176,9 +174,8 @@ private void processTokenNormal(JsonToken token, List<TokenBuffer> result) throw
 
 		if ((token.isStructEnd() || token.isScalarValue()) && this.objectDepth == 0 && this.arrayDepth == 0) {
 			result.add(this.tokenBuffer);
-			createToken();
+			this.tokenBuffer = createToken();
 		}
-
 	}
 
 	private void processTokenArray(JsonToken token, List<TokenBuffer> result) throws IOException {
@@ -189,13 +186,14 @@ private void processTokenArray(JsonToken token, List<TokenBuffer> result) throws
 		if (this.objectDepth == 0 && (this.arrayDepth == 0 || this.arrayDepth == 1) &&
 				(token == JsonToken.END_OBJECT || token.isScalarValue())) {
 			result.add(this.tokenBuffer);
-			createToken();
+			this.tokenBuffer = createToken();
 		}
 	}
 
-	private void createToken() {
-		this.tokenBuffer = new TokenBuffer(this.parser, this.deserializationContext);
-		this.tokenBuffer.forceUseOfBigDecimal(this.forceUseOfBigDecimal);
+	private TokenBuffer createToken() {
+		TokenBuffer tokenBuffer = new TokenBuffer(this.parser, this.deserializationContext);
+		tokenBuffer.forceUseOfBigDecimal(this.forceUseOfBigDecimal);
+		return tokenBuffer;
 	}
 
 	private boolean isTopLevelArrayToken(JsonToken token) {
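Side note on the refactoring above: having createToken() return the new buffer, rather than writing the field from inside the method, means this.tokenBuffer is assigned in the constructor and at every reset point, which is why the @Nullable annotation and its explanatory comment can be dropped. A minimal, self-contained sketch of the same pattern, with hypothetical names that are not from the Spring source:

// Minimal illustration of the pattern applied in the hunks above; names are hypothetical.
final class Accumulator {

	private StringBuilder buffer;   // effectively non-null: assigned in the constructor and on every reset

	Accumulator() {
		this.buffer = createBuffer();   // assign the factory method's result instead of mutating a field inside it
	}

	void append(String chunk) {
		this.buffer.append(chunk);
	}

	String flushAndReset() {
		String emitted = this.buffer.toString();
		this.buffer = createBuffer();   // mirrors "this.tokenBuffer = createToken()" at the call sites above
		return emitted;
	}

	private StringBuilder createBuffer() {   // returns the new instance rather than writing the field itself
		return new StringBuilder();
	}
}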
@@ -233,8 +231,8 @@ private void raiseLimitException() {
 	 * @param objectMapper the current mapper instance
 	 * @param tokenizeArrays if {@code true} and the "top level" JSON object is
 	 * an array, each element is returned individually immediately after it is received
-	 * @param forceUseOfBigDecimal if {@code true}, any floating point values encountered in source will use
-	 * {@link java.math.BigDecimal}
+	 * @param forceUseOfBigDecimal if {@code true}, any floating point values encountered
+	 * in source will use {@link java.math.BigDecimal}
 	 * @param maxInMemorySize maximum memory size
 	 * @return the resulting token buffers
 	 */
@@ -248,8 +246,8 @@ public static Flux<TokenBuffer> tokenize(Flux<DataBuffer> dataBuffers, JsonFacto
 			context = ((DefaultDeserializationContext) context).createInstance(
 					objectMapper.getDeserializationConfig(), parser, objectMapper.getInjectableValues());
 		}
-		Jackson2Tokenizer tokenizer = new Jackson2Tokenizer(parser, context, tokenizeArrays, forceUseOfBigDecimal,
-				maxInMemorySize);
+		Jackson2Tokenizer tokenizer =
+				new Jackson2Tokenizer(parser, context, tokenizeArrays, forceUseOfBigDecimal, maxInMemorySize);
 		return dataBuffers.concatMapIterable(tokenizer::tokenize).concatWith(tokenizer.endOfInput());
 	}
 	catch (IOException ex) {
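For context, a hedged usage sketch of the static entry point touched by the last hunk. Jackson2Tokenizer is a package-private class, so real callers (such as Spring's reactive Jackson 2 decoder) live in the same package; the parameter order below is inferred from the hunk header and the javadoc above and may not match every framework version exactly:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.util.TokenBuffer;
import org.springframework.core.io.buffer.DataBuffer;
import reactor.core.publisher.Flux;

// Hypothetical caller; assumes it is compiled into the same package as Jackson2Tokenizer.
class TokenizeUsageSketch {

	static Flux<TokenBuffer> tokenizeJson(Flux<DataBuffer> dataBuffers, ObjectMapper mapper) {
		return Jackson2Tokenizer.tokenize(
				dataBuffers,            // raw JSON bytes, possibly split across buffer boundaries
				mapper.getFactory(),    // JsonFactory used to create the non-blocking parser
				mapper,                 // the current mapper instance
				true,                   // tokenizeArrays: emit each top-level array element as it arrives
				false,                  // forceUseOfBigDecimal: keep default floating point handling
				256 * 1024);            // maxInMemorySize: byte limit before a DataBufferLimitException is raised
	}
}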