 import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.inference.ChunkedInference;
-import org.elasticsearch.inference.ChunkingSettings;
 import org.elasticsearch.inference.InferenceService;
 import org.elasticsearch.inference.InferenceServiceRegistry;
 import org.elasticsearch.inference.Model;
 import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbedding;
 import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceError;
 import org.elasticsearch.xpack.inference.InferencePlugin;
-import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder;
 import org.elasticsearch.xpack.inference.mapper.SemanticTextField;
 import org.elasticsearch.xpack.inference.model.TestModel;
 import org.elasticsearch.xpack.inference.registry.ModelRegistry;
 import static org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilter.getIndexRequestOrNull;
 import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.getChunksFieldName;
 import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.getOriginalTextFieldName;
-import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.generateRandomChunkingSettings;
 import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.randomChunkedInferenceEmbeddingSparse;
 import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.randomSemanticText;
 import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.randomSemanticTextInput;
@@ -383,10 +380,7 @@ public void testManyRandomDocs() throws Exception {
         for (int i = 0; i < numInferenceFields; i++) {
             String field = randomAlphaOfLengthBetween(5, 10);
             String inferenceId = randomFrom(inferenceModelMap.keySet());
-            Map<String, Object> chunkingSettingsMap = Optional.ofNullable(generateRandomChunkingSettings())
-                .map(ChunkingSettings::asMap)
-                .orElse(null);
-            inferenceFieldMap.put(field, new InferenceFieldMetadata(field, inferenceId, new String[] { field }, chunkingSettingsMap));
+            inferenceFieldMap.put(field, new InferenceFieldMetadata(field, inferenceId, new String[] { field }, null));
         }

         int numRequests = atLeast(100);
@@ -544,9 +538,6 @@ private static BulkItemRequest[] randomBulkItemRequest(
         for (var entry : fieldInferenceMap.values()) {
             String field = entry.getName();
             var model = modelMap.get(entry.getInferenceId());
-            ChunkingSettings chunkingSettings = entry.getChunkingSettings() != null
-                ? ChunkingSettingsBuilder.fromMap(new HashMap<>(entry.getChunkingSettings()))
-                : null;
             Object inputObject = randomSemanticTextInput();
             String inputText = inputObject.toString();
             docMap.put(field, inputObject);
@@ -566,21 +557,14 @@ private static BulkItemRequest[] randomBulkItemRequest(
                     useLegacyFormat,
                     field,
                     model,
-                    chunkingSettings,
+                    null,
                     List.of(inputText),
                     results,
                     requestContentType
                 );
             } else {
                 Map<String, List<String>> inputTextMap = Map.of(field, List.of(inputText));
-                semanticTextField = randomSemanticText(
-                    useLegacyFormat,
-                    field,
-                    model,
-                    chunkingSettings,
-                    List.of(inputText),
-                    requestContentType
-                );
+                semanticTextField = randomSemanticText(useLegacyFormat, field, model, null, List.of(inputText), requestContentType);
                 model.putResult(inputText, toChunkedResult(useLegacyFormat, inputTextMap, semanticTextField));
             }
