Skip to content

Commit 8d8b273

Browse files
committed
Revert a bunch of changes to try to narrow down what broke CI
1 parent c9bfa32 commit 8d8b273

File tree

2 files changed

+14
-25
lines changed

2 files changed

+14
-25
lines changed

server/src/main/java/org/elasticsearch/cluster/metadata/InferenceFieldMetadata.java

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,6 @@ public final class InferenceFieldMetadata implements SimpleDiffable<InferenceFie
4646
private final String[] sourceFields;
4747
private final Map<String, Object> chunkingSettings;
4848

49-
// TODO can this be ChunkingSettings instead of Map<String, Object>?
5049
public InferenceFieldMetadata(String name, String inferenceId, String[] sourceFields, Map<String, Object> chunkingSettings) {
5150
this(name, inferenceId, inferenceId, sourceFields, chunkingSettings);
5251
}
@@ -63,11 +62,6 @@ public InferenceFieldMetadata(
6362
this.searchInferenceId = Objects.requireNonNull(searchInferenceId);
6463
this.sourceFields = Objects.requireNonNull(sourceFields);
6564
this.chunkingSettings = chunkingSettings;
66-
67-
// TODO remove this, trying to get stack traces where this called
68-
if (chunkingSettings != null && chunkingSettings.size() != 3) {
69-
throw new IllegalArgumentException("Chunking settings must contain exactly 3 settings");
70-
}
7165
}
7266

7367
public InferenceFieldMetadata(StreamInput input) throws IOException {

x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java

Lines changed: 14 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -307,25 +307,20 @@ public void onFailure(Exception exc) {
307307
}
308308
int currentBatchSize = Math.min(requests.size(), batchSize);
309309
ChunkingSettings chunkingSettings = requests.get(0).chunkingSettings;
310-
List<FieldInferenceRequest> currentBatch = new ArrayList<>();
311-
List<FieldInferenceRequest> others = new ArrayList<>();
312-
for (int i = 0; i < currentBatchSize; i++) {
313-
FieldInferenceRequest request = requests.get(i);
314-
if ((chunkingSettings == null && request.chunkingSettings == null) || request.chunkingSettings.equals(chunkingSettings)) {
315-
currentBatch.add(request);
316-
} else {
317-
others.add(request);
318-
}
319-
}
320-
310+
// final List<ChunkedInputs> currentBatch = new ArrayList<>();
311+
// for (int i = 0; i < currentBatchSize; i++) {
312+
// FieldInferenceRequest request = requests.get(i);
313+
// ChunkedInputs chunkedInputs = new ChunkedInputs(chunkingSettings, List.of(request.input));
314+
// currentBatch.add(chunkedInputs);
315+
// }
316+
final List<FieldInferenceRequest> currentBatch = requests.subList(0, currentBatchSize);
321317
final List<FieldInferenceRequest> nextBatch = requests.subList(currentBatchSize, requests.size());
322-
nextBatch.addAll(others);
323318

324-
// We can assume current batch has all the same chunking settings
325-
ChunkedInputs chunkedInputs = new ChunkedInputs(
326-
chunkingSettings,
327-
currentBatch.stream().map(r -> r.input).collect(Collectors.toList())
328-
);
319+
// ChunkedInputs chunkedInputs = new ChunkedInputs(
320+
// chunkingSettings,
321+
// currentBatch.stream().map(FieldInferenceRequest::input).collect(Collectors.toList())
322+
// );
323+
final List<String> inputs = currentBatch.stream().map(FieldInferenceRequest::input).toList();
329324

330325
ActionListener<List<ChunkedInference>> completionListener = new ActionListener<>() {
331326
@Override
@@ -395,9 +390,9 @@ private void onFinish() {
395390
.chunkedInfer(
396391
inferenceProvider.model(),
397392
null,
398-
chunkedInputs.inputs(),
393+
inputs,
399394
Map.of(),
400-
chunkedInputs.chunkingSettings(),
395+
null, // TODO add chunking settings
401396
InputType.INGEST,
402397
TimeValue.MAX_VALUE,
403398
completionListener

0 commit comments

Comments (0)