From 2e854e401b78166f43367e80b9f2c67a2c91aa7e Mon Sep 17 00:00:00 2001
From: David Kyle
Date: Wed, 7 Aug 2024 17:39:24 +0100
Subject: [PATCH 01/58] Write model parts async

---
 .../packageloader/action/ModelImporter.java   | 30 +++++++++++++------
 .../TransportLoadTrainedModelPackage.java     |  1 -
 ...TransportLoadTrainedModelPackageTests.java |  6 +++-
 3 files changed, 26 insertions(+), 11 deletions(-)

diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
index 33d5d5982d2b0..76b9277363b34 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
@@ -10,9 +10,11 @@
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.ElasticsearchStatusException;
+import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.ActionType;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
@@ -27,6 +29,7 @@
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.Objects;
+import java.util.concurrent.Semaphore;
 
 import static org.elasticsearch.core.Strings.format;
 
@@ -35,26 +38,31 @@
  */
 class ModelImporter {
     private static final int DEFAULT_CHUNK_SIZE = 1024 * 1024; // 1MB
+    private static final int MAX_IN_FLIGHT_REQUESTS = 5;
     private static final Logger logger = LogManager.getLogger(ModelImporter.class);
     private final Client client;
     private final String modelId;
     private final ModelPackageConfig config;
     private final ModelDownloadTask task;
+    private final Semaphore requestLimiter;
 
     ModelImporter(Client client, String modelId, ModelPackageConfig packageConfig, ModelDownloadTask task) {
         this.client = client;
        this.modelId = Objects.requireNonNull(modelId);
         this.config = Objects.requireNonNull(packageConfig);
         this.task = Objects.requireNonNull(task);
+        this.requestLimiter = new Semaphore(MAX_IN_FLIGHT_REQUESTS);
     }
 
-    public void doImport() throws URISyntaxException, IOException, ElasticsearchStatusException {
+    public void doImport() throws URISyntaxException, IOException, ElasticsearchStatusException, InterruptedException {
         long size = config.getSize();
 
+        var releasingListener = ActionListener.wrap(r -> requestLimiter.release(), e -> requestLimiter.release());
+
         // Uploading other artefacts of the model first, that way the model is last and a simple search can be used to check if the
         // download is complete
         if (Strings.isNullOrEmpty(config.getVocabularyFile()) == false) {
-            uploadVocabulary();
+            uploadVocabulary(releasingListener);
 
             logger.debug(() -> format("[%s] imported model vocabulary [%s]", modelId, config.getVocabularyFile()));
         }
@@ -84,7 +92,7 @@ public void doImport() throws URISyntaxException, IOException, ElasticsearchStat
             true
         );
 
-        executeRequestIfNotCancelled(PutTrainedModelDefinitionPartAction.INSTANCE, modelPartRequest);
+        executeRequestIfNotCancelled(PutTrainedModelDefinitionPartAction.INSTANCE, modelPartRequest, releasingListener);
     }
 
     // get the last part, this time verify the checksum and size
@@ -119,11 +127,13 @@ public void doImport() throws URISyntaxException, IOException, ElasticsearchStat
             true
         );
 
-        executeRequestIfNotCancelled(PutTrainedModelDefinitionPartAction.INSTANCE, finalModelPartRequest);
+        executeRequestIfNotCancelled(PutTrainedModelDefinitionPartAction.INSTANCE, finalModelPartRequest, releasingListener);
+        logger.info("waiting for finish");
+        requestLimiter.acquire(MAX_IN_FLIGHT_REQUESTS); // cannot acquire until all inflight requests have completed
         logger.debug(format("finished importing model [%s] using [%d] parts", modelId, totalParts));
     }
 
-    private void uploadVocabulary() throws URISyntaxException {
+    private void uploadVocabulary(ActionListener listener) throws URISyntaxException, InterruptedException {
         ModelLoaderUtils.VocabularyParts vocabularyParts = ModelLoaderUtils.loadVocabulary(
             ModelLoaderUtils.resolvePackageLocation(config.getModelRepository(), config.getVocabularyFile())
         );
@@ -136,17 +146,19 @@ private void uploadVocabulary() throws URISyntaxException {
             true
         );
 
-        executeRequestIfNotCancelled(PutTrainedModelVocabularyAction.INSTANCE, request);
+        executeRequestIfNotCancelled(PutTrainedModelVocabularyAction.INSTANCE, request, listener);
     }
 
     private void executeRequestIfNotCancelled(
         ActionType action,
-        Request request
-    ) {
+        Request request,
+        ActionListener listener
+    ) throws InterruptedException {
         if (task.isCancelled()) {
             throw new TaskCancelledException(format("task cancelled with reason [%s]", task.getReasonCancelled()));
         }
 
-        client.execute(action, request).actionGet();
+        requestLimiter.acquire();
+        client.execute(action, request, listener);
     }
 }

diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java
index c4c2c17fcbc12..ee9c21dc24254 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java
@@ -174,7 +174,6 @@ static void importModel(
             } else {
                 listener.onResponse(AcknowledgedResponse.TRUE);
             }
-
         }
     }
 }

diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java
index a3f59e13f2f5b..e8a0757af1581 100644
--- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java
+++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java
@@ -182,7 +182,11 @@ private void assertNotificationAndOnFailure(
     private ModelImporter createUploader(Exception exception) throws URISyntaxException, IOException {
         ModelImporter uploader = mock(ModelImporter.class);
         if (exception != null) {
-            doThrow(exception).when(uploader).doImport();
+            try {
+                doThrow(exception).when(uploader).doImport();
+            } catch (InterruptedException e) {
+                throw new RuntimeException(e);
+            }
         }
 
         return uploader;
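The pattern this first patch introduces is worth isolating: a Semaphore bounds the number of in-flight asynchronous uploads, the listener returns the permit on success and on failure, and acquiring all MAX_IN_FLIGHT_REQUESTS permits at the end acts as a barrier for outstanding work. A minimal standalone sketch of the same pattern using only java.util.concurrent (class and method names here are illustrative, not the Elasticsearch client API):

    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Semaphore;

    public class ThrottledUploadSketch {
        private static final int MAX_IN_FLIGHT = 5;

        public static void main(String[] args) throws InterruptedException {
            ExecutorService executor = Executors.newFixedThreadPool(4);
            Semaphore permits = new Semaphore(MAX_IN_FLIGHT);

            for (int part = 0; part < 20; part++) {
                permits.acquire(); // blocks once MAX_IN_FLIGHT uploads are pending
                int p = part;
                CompletableFuture.runAsync(() -> uploadPart(p), executor)
                    // release on success and on failure, like the patch's releasingListener
                    .whenComplete((r, e) -> permits.release());
            }

            // cannot succeed until every in-flight upload has returned its permit
            permits.acquire(MAX_IN_FLIGHT);
            System.out.println("all parts uploaded");
            executor.shutdown();
        }

        private static void uploadPart(int part) {
            // stand-in for client.execute(PutTrainedModelDefinitionPartAction.INSTANCE, ...)
        }
    }

The final acquire(MAX_IN_FLIGHT) is the same barrier trick the patch relies on; it only succeeds once all permits have been released, i.e. once no request is still in flight.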
 docs/changelog/111684.yaml

---
 docs/changelog/111684.yaml | 5 +++++
 1 file changed, 5 insertions(+)
 create mode 100644 docs/changelog/111684.yaml

diff --git a/docs/changelog/111684.yaml b/docs/changelog/111684.yaml
new file mode 100644
index 0000000000000..32edb5723cb0a
--- /dev/null
+++ b/docs/changelog/111684.yaml
@@ -0,0 +1,5 @@
+pr: 111684
+summary: Write downloaded model parts async
+area: Machine Learning
+type: enhancement
+issues: []

From be3908288116697ec0e2c90606e544a0e54f6820 Mon Sep 17 00:00:00 2001
From: David Kyle
Date: Fri, 9 Aug 2024 13:23:03 +0100
Subject: [PATCH 03/58] Pass a listener to import

---
 .../packageloader/action/ModelImporter.java   | 148 ++++++++++--------
 .../TransportLoadTrainedModelPackage.java     |  79 ++++------
 2 files changed, 117 insertions(+), 110 deletions(-)

diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
index 76b9277363b34..5573cff5a5136 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
@@ -14,6 +14,7 @@
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.ActionType;
+import org.elasticsearch.action.support.RefCountingListener;
 import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.common.Strings;
@@ -24,12 +25,12 @@
 import org.elasticsearch.xpack.core.ml.action.PutTrainedModelVocabularyAction;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ModelPackageConfig;
 
-import java.io.IOException;
 import java.io.InputStream;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.Objects;
 import java.util.concurrent.Semaphore;
+import java.util.concurrent.atomic.AtomicReference;
 
 import static org.elasticsearch.core.Strings.format;
 
@@ -38,102 +39,126 @@
  */
 class ModelImporter {
     private static final int DEFAULT_CHUNK_SIZE = 1024 * 1024; // 1MB
-    private static final int MAX_IN_FLIGHT_REQUESTS = 5;
+    private static final int MAX_IN_FLIGHT_REQUESTS = 3;
     private static final Logger logger = LogManager.getLogger(ModelImporter.class);
     private final Client client;
     private final String modelId;
     private final ModelPackageConfig config;
     private final ModelDownloadTask task;
-    private final Semaphore requestLimiter;
 
     ModelImporter(Client client, String modelId, ModelPackageConfig packageConfig, ModelDownloadTask task) {
         this.client = client;
         this.modelId = Objects.requireNonNull(modelId);
         this.config = Objects.requireNonNull(packageConfig);
         this.task = Objects.requireNonNull(task);
-        this.requestLimiter = new Semaphore(MAX_IN_FLIGHT_REQUESTS);
     }
 
-    public void doImport() throws URISyntaxException, IOException, ElasticsearchStatusException, InterruptedException {
+    public void doImport(ActionListener finalListener) {
         long size = config.getSize();
 
-        var releasingListener = ActionListener.wrap(r -> requestLimiter.release(), e -> requestLimiter.release());
+        var firstError = new AtomicReference();
+        var requestLimiter = new Semaphore(MAX_IN_FLIGHT_REQUESTS);
 
-        // Uploading other artefacts of the model first, that way the model is last and a simple search can be used to check if the
-        // download is complete
-        if (Strings.isNullOrEmpty(config.getVocabularyFile()) == false) {
-            uploadVocabulary(releasingListener);
+        try (var countingListener = new RefCountingListener(1, finalListener.map(ignored -> AcknowledgedResponse.TRUE))) {
+            var releasingListener = ActionListener.wrap(r -> requestLimiter.release(), e -> {
+                requestLimiter.release();
+                firstError.compareAndSet(null, e);
+            });
 
-            logger.debug(() -> format("[%s] imported model vocabulary [%s]", modelId, config.getVocabularyFile()));
-        }
+            // Uploading other artefacts of the model first, that way the model is last and a simple search can be used to check if the
+            // download is complete
+            if (Strings.isNullOrEmpty(config.getVocabularyFile()) == false) {
+                uploadVocabulary(requestLimiter, releasingListener);
 
-        URI uri = ModelLoaderUtils.resolvePackageLocation(
-            config.getModelRepository(),
-            config.getPackagedModelId() + ModelLoaderUtils.MODEL_FILE_EXTENSION
-        );
+                logger.debug(() -> format("[%s] imported model vocabulary [%s]", modelId, config.getVocabularyFile()));
+            }
 
-        InputStream modelInputStream = ModelLoaderUtils.getInputStreamFromModelRepository(uri);
+            URI uri = ModelLoaderUtils.resolvePackageLocation(
+                config.getModelRepository(),
+                config.getPackagedModelId() + ModelLoaderUtils.MODEL_FILE_EXTENSION
+            );
 
-        ModelLoaderUtils.InputStreamChunker chunkIterator = new ModelLoaderUtils.InputStreamChunker(modelInputStream, DEFAULT_CHUNK_SIZE);
+            InputStream modelInputStream = ModelLoaderUtils.getInputStreamFromModelRepository(uri);
 
-        // simple round up
-        int totalParts = (int) ((size + DEFAULT_CHUNK_SIZE - 1) / DEFAULT_CHUNK_SIZE);
+            ModelLoaderUtils.InputStreamChunker chunkIterator = new ModelLoaderUtils.InputStreamChunker(
+                modelInputStream,
+                DEFAULT_CHUNK_SIZE
+            );
 
-        for (int part = 0; part < totalParts - 1; ++part) {
-            task.setProgress(totalParts, part);
+            // simple round up
+            int totalParts = (int) ((size + DEFAULT_CHUNK_SIZE - 1) / DEFAULT_CHUNK_SIZE);
+
+            for (int part = 0; part < totalParts - 1; ++part) {
+                task.setProgress(totalParts, part);
+                BytesArray definition = chunkIterator.next();
+
+                PutTrainedModelDefinitionPartAction.Request modelPartRequest = new PutTrainedModelDefinitionPartAction.Request(
+                    modelId,
+                    definition,
+                    part,
+                    size,
+                    totalParts,
+                    true
+                );
+
+                executeRequestIfNotCancelled(
+                    PutTrainedModelDefinitionPartAction.INSTANCE,
+                    modelPartRequest,
+                    requestLimiter,
+                    releasingListener
+                );
+            }
+
+            // get the last part, this time verify the checksum and size
             BytesArray definition = chunkIterator.next();
 
-            PutTrainedModelDefinitionPartAction.Request modelPartRequest = new PutTrainedModelDefinitionPartAction.Request(
+            if (config.getSha256().equals(chunkIterator.getSha256()) == false) {
+                String message = format(
+                    "Model sha256 checksums do not match, expected [%s] but got [%s]",
+                    config.getSha256(),
+                    chunkIterator.getSha256()
+                );
+
+                throw new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR);
+            }
+
+            if (config.getSize() != chunkIterator.getTotalBytesRead()) {
+                String message = format(
+                    "Model size does not match, expected [%d] but got [%d]",
+                    config.getSize(),
+                    chunkIterator.getTotalBytesRead()
+                );
+
+                throw new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR);
+            }
+
+            PutTrainedModelDefinitionPartAction.Request finalModelPartRequest = new PutTrainedModelDefinitionPartAction.Request(
                 modelId,
                 definition,
-                part,
+                totalParts - 1,
                 size,
                 totalParts,
                 true
             );
 
-            executeRequestIfNotCancelled(PutTrainedModelDefinitionPartAction.INSTANCE, modelPartRequest, releasingListener);
-        }
-
-        // get the last part, this time verify the checksum and size
-        BytesArray definition = chunkIterator.next();
-
-        if (config.getSha256().equals(chunkIterator.getSha256()) == false) {
-            String message = format(
-                "Model sha256 checksums do not match, expected [%s] but got [%s]",
-                config.getSha256(),
-                chunkIterator.getSha256()
+            executeRequestIfNotCancelled(
+                PutTrainedModelDefinitionPartAction.INSTANCE,
+                finalModelPartRequest,
+                requestLimiter,
+                releasingListener
             );
 
-            throw new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR);
-        }
+            requestLimiter.acquire(MAX_IN_FLIGHT_REQUESTS); // cannot acquire until all inflight requests have completed
 
-        if (config.getSize() != chunkIterator.getTotalBytesRead()) {
-            String message = format(
-                "Model size does not match, expected [%d] but got [%d]",
-                config.getSize(),
-                chunkIterator.getTotalBytesRead()
-            );
-
-            throw new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR);
+            logger.debug(format("finished importing model [%s] using [%d] parts", modelId, totalParts));
+        } catch (Exception e) {
+
+// finalListener.onFailure(e); TODO is this called twice
         }
-
-        PutTrainedModelDefinitionPartAction.Request finalModelPartRequest = new PutTrainedModelDefinitionPartAction.Request(
-            modelId,
-            definition,
-            totalParts - 1,
-            size,
-            totalParts,
-            true
-        );
-
-        executeRequestIfNotCancelled(PutTrainedModelDefinitionPartAction.INSTANCE, finalModelPartRequest, releasingListener);
-        logger.info("waiting for finish");
-        requestLimiter.acquire(MAX_IN_FLIGHT_REQUESTS); // cannot acquire until all inflight requests have completed
-        logger.debug(format("finished importing model [%s] using [%d] parts", modelId, totalParts));
     }
 
-    private void uploadVocabulary(ActionListener listener) throws URISyntaxException, InterruptedException {
+    private void uploadVocabulary(Semaphore requestLimiter, ActionListener listener) throws URISyntaxException,
+        InterruptedException {
         ModelLoaderUtils.VocabularyParts vocabularyParts = ModelLoaderUtils.loadVocabulary(
             ModelLoaderUtils.resolvePackageLocation(config.getModelRepository(), config.getVocabularyFile())
         );
@@ -146,12 +171,13 @@ private void uploadVocabulary(ActionListener listener) thr
             true
         );
 
-        executeRequestIfNotCancelled(PutTrainedModelVocabularyAction.INSTANCE, request, listener);
+        executeRequestIfNotCancelled(PutTrainedModelVocabularyAction.INSTANCE, request, requestLimiter, listener);
     }
 
     private void executeRequestIfNotCancelled(
         ActionType action,
         Request request,
+        Semaphore requestLimiter,
         ActionListener listener
     ) throws InterruptedException {
         if (task.isCancelled()) {
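Patch 3's other structural move is wrapping the whole import in a RefCountingListener built over the final listener, so completion fires exactly once after every child request finishes. A hypothetical, stripped-down sketch of that ref-counting idea (not the real org.elasticsearch.action.support.RefCountingListener):

    import java.util.concurrent.atomic.AtomicInteger;
    import java.util.concurrent.atomic.AtomicReference;
    import java.util.function.Consumer;

    final class RefCounterSketch {
        private final AtomicInteger refs = new AtomicInteger(1); // the "self" reference
        private final AtomicReference<Exception> firstError = new AtomicReference<>();
        private final Consumer<Exception> onDone; // called with null on success

        RefCounterSketch(Consumer<Exception> onDone) {
            this.onDone = onDone;
        }

        Runnable acquire() { // one reference per in-flight request
            refs.incrementAndGet();
            return this::release;
        }

        void failChild(Exception e) { // record the first failure, still release the ref
            firstError.compareAndSet(null, e);
            release();
        }

        void close() { // drop the self reference once all requests are dispatched
            release();
        }

        private void release() {
            if (refs.decrementAndGet() == 0) {
                onDone.accept(firstError.get());
            }
        }

        public static void main(String[] args) {
            var counter = new RefCounterSketch(e -> System.out.println(e == null ? "done" : "failed: " + e));
            Runnable part1 = counter.acquire();
            Runnable part2 = counter.acquire();
            counter.close(); // dispatch loop finished; two uploads still in flight
            part1.run();     // first upload completes
            part2.run();     // second completes -> "done" prints now, exactly once
        }
    }

The try-with-resources in the patch plays the role of close() here: the final listener cannot fire before the dispatch loop has finished, even if every request completes instantly.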
diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java
index ee9c21dc24254..d62687aa20b21 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java
@@ -101,8 +101,10 @@ protected void masterOperation(Task task, Request request, ClusterState state, A
             downloadTask
         );
 
+        var downloadCompleteListener = request.isWaitForCompletion() ? listener : ActionListener.noop();
+
         threadPool.executor(MachineLearningPackageLoader.UTILITY_THREAD_POOL_NAME)
-            .execute(() -> importModel(client, taskManager, request, modelImporter, listener, downloadTask));
+            .execute(() -> importModel(client, taskManager, request, modelImporter, downloadCompleteListener, downloadTask));
     } catch (Exception e) {
         taskManager.unregister(downloadTask);
         listener.onFailure(e);
@@ -136,16 +138,12 @@ static void importModel(
         ActionListener listener,
         Task task
     ) {
-        String modelId = request.getModelId();
-        final AtomicReference exceptionRef = new AtomicReference<>();
-
-        try {
-            final long relativeStartNanos = System.nanoTime();
+        final String modelId = request.getModelId();
+        final long relativeStartNanos = System.nanoTime();
 
-            logAndWriteNotificationAtLevel(auditClient, modelId, "starting model import", Level.INFO);
-
-            modelImporter.doImport();
+        logAndWriteNotificationAtLevel(auditClient, modelId, "starting model import", Level.INFO);
 
+        var finishListener = ActionListener.wrap(success -> {
             final long totalRuntimeNanos = System.nanoTime() - relativeStartNanos;
             logAndWriteNotificationAtLevel(
                 auditClient,
                 modelId,
@@ -153,28 +151,25 @@ static void importModel(
                 format("finished model import after [%d] seconds", TimeUnit.NANOSECONDS.toSeconds(totalRuntimeNanos)),
                 Level.INFO
             );
-        } catch (TaskCancelledException e) {
-            recordError(auditClient, modelId, exceptionRef, e, Level.WARNING);
-        } catch (ElasticsearchException e) {
-            recordError(auditClient, modelId, exceptionRef, e, Level.ERROR);
-        } catch (MalformedURLException e) {
-            recordError(auditClient, modelId, "an invalid URL", exceptionRef, e, Level.ERROR, RestStatus.INTERNAL_SERVER_ERROR);
-        } catch (URISyntaxException e) {
-            recordError(auditClient, modelId, "an invalid URL syntax", exceptionRef, e, Level.ERROR, RestStatus.INTERNAL_SERVER_ERROR);
-        } catch (IOException e) {
-            recordError(auditClient, modelId, "an IOException", exceptionRef, e, Level.ERROR, RestStatus.SERVICE_UNAVAILABLE);
-        } catch (Exception e) {
-            recordError(auditClient, modelId, "an Exception", exceptionRef, e, Level.ERROR, RestStatus.INTERNAL_SERVER_ERROR);
-        } finally {
-            taskManager.unregister(task);
-
-            if (request.isWaitForCompletion()) {
-                if (exceptionRef.get() != null) {
-                    listener.onFailure(exceptionRef.get());
-                } else {
-                    listener.onResponse(AcknowledgedResponse.TRUE);
-                }
-            }
+            listener.onResponse(AcknowledgedResponse.TRUE);
+        }, exception -> listener.onFailure(processException(auditClient, modelId, exception)));
+
+        modelImporter.doImport(ActionListener.runAfter(finishListener, () -> taskManager.unregister(task)));
+    }
+
+    static Exception processException(Client auditClient, String modelId, Exception e) {
+        if (e instanceof TaskCancelledException te) {
+            return recordError(auditClient, modelId, te, Level.WARNING);
+        } else if (e instanceof ElasticsearchException es) {
+            return recordError(auditClient, modelId, es, Level.ERROR);
+        } else if (e instanceof MalformedURLException) {
+            return recordError(auditClient, modelId, "an invalid URL", e, Level.ERROR, RestStatus.BAD_REQUEST);
+        } else if (e instanceof URISyntaxException) {
+            return recordError(auditClient, modelId, "an invalid URL syntax", e, Level.ERROR, RestStatus.BAD_REQUEST);
+        } else if (e instanceof IOException) {
+            return recordError(auditClient, modelId, "an IOException", e, Level.ERROR, RestStatus.SERVICE_UNAVAILABLE);
+        } else {
+            return recordError(auditClient, modelId, "an Exception", e, Level.ERROR, RestStatus.INTERNAL_SERVER_ERROR);
         }
     }
@@ -212,30 +207,16 @@ public ModelDownloadTask createTask(long id, String type, String action, TaskId
         }
     }
 
-    private static void recordError(
-        Client client,
-        String modelId,
-        AtomicReference exceptionRef,
-        ElasticsearchException e,
-        Level level
-    ) {
+    private static Exception recordError(Client client, String modelId, ElasticsearchException e, Level level) {
         String message = format("Model importing failed due to [%s]", e.getDetailedMessage());
         logAndWriteNotificationAtLevel(client, modelId, message, level);
-        exceptionRef.set(e);
+        return e;
     }
 
-    private static void recordError(
-        Client client,
-        String modelId,
-        String failureType,
-        AtomicReference exceptionRef,
-        Exception e,
-        Level level,
-        RestStatus status
-    ) {
+    private static Exception recordError(Client client, String modelId, String failureType, Exception e, Level level, RestStatus status) {
         String message = format("Model importing failed due to %s [%s]", failureType, e);
         logAndWriteNotificationAtLevel(client, modelId, message, level);
-        exceptionRef.set(new ElasticsearchStatusException(message, status, e));
+        return new ElasticsearchStatusException(message, status, e);
     }
 
     private static void logAndWriteNotificationAtLevel(Client client, String modelId, String message, Level level) {
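With the blocking call gone, the try/catch chain becomes the processException mapping above, and cleanup moves into a listener combinator: ActionListener.runAfter guarantees the task is unregistered on both outcomes. A hypothetical minimal re-implementation of the two combinators the patch leans on (not the real org.elasticsearch.action.ActionListener), just to make the control flow concrete:

    import java.util.function.Consumer;

    interface ListenerSketch<T> {
        void onResponse(T result);
        void onFailure(Exception e);

        // build a listener from two lambdas, as ActionListener.wrap does
        static <T> ListenerSketch<T> wrap(Consumer<T> onResponse, Consumer<Exception> onFailure) {
            return new ListenerSketch<>() {
                @Override public void onResponse(T r) { onResponse.accept(r); }
                @Override public void onFailure(Exception e) { onFailure.accept(e); }
            };
        }

        // run the cleanup action exactly once, on success and on failure --
        // the patch uses this to guarantee taskManager.unregister(task)
        static <T> ListenerSketch<T> runAfter(ListenerSketch<T> delegate, Runnable cleanup) {
            return new ListenerSketch<>() {
                @Override public void onResponse(T r) {
                    try { delegate.onResponse(r); } finally { cleanup.run(); }
                }
                @Override public void onFailure(Exception e) {
                    try { delegate.onFailure(e); } finally { cleanup.run(); }
                }
            };
        }
    }

Note the status remapping that rides along in this commit: MalformedURLException and URISyntaxException now surface as BAD_REQUEST rather than INTERNAL_SERVER_ERROR, which the later test changes pick up.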
From 5310a08c0253c84e7df1401a3ae869b2c417c133 Mon Sep 17 00:00:00 2001
From: David Kyle
Date: Fri, 9 Aug 2024 14:23:08 +0100
Subject: [PATCH 04/58] Ref counting WIP

---
 .../packageloader/action/ModelImporter.java   | 21 ++++++------
 .../action/ModelImporterTests.java            | 24 +++++++++++++++
 ...TransportLoadTrainedModelPackageTests.java | 30 ++++++++-----------
 3 files changed, 47 insertions(+), 28 deletions(-)
 create mode 100644 x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java

diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
index 5573cff5a5136..2b28e98658499 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
@@ -64,11 +64,11 @@ public void doImport(ActionListener finalListener) {
                 requestLimiter.release();
                 firstError.compareAndSet(null, e);
             });
-
+
             // Uploading other artefacts of the model first, that way the model is last and a simple search can be used to check if the
             // download is complete
             if (Strings.isNullOrEmpty(config.getVocabularyFile()) == false) {
-                uploadVocabulary(requestLimiter, releasingListener);
+                uploadVocabulary(requestLimiter, countingListener);
 
                 logger.debug(() -> format("[%s] imported model vocabulary [%s]", modelId, config.getVocabularyFile()));
             }
@@ -105,7 +105,7 @@ public void doImport(ActionListener finalListener) {
                     PutTrainedModelDefinitionPartAction.INSTANCE,
                     modelPartRequest,
                     requestLimiter,
-                    releasingListener
+                    countingListener
                 );
             }
 
@@ -145,19 +145,18 @@ public void doImport(ActionListener finalListener) {
                 PutTrainedModelDefinitionPartAction.INSTANCE,
                 finalModelPartRequest,
                 requestLimiter,
-                releasingListener
+                countingListener
             );
 
-            requestLimiter.acquire(MAX_IN_FLIGHT_REQUESTS); // cannot acquire until all inflight requests have completed
-
             logger.debug(format("finished importing model [%s] using [%d] parts", modelId, totalParts));
         } catch (Exception e) {
-
+
 // finalListener.onFailure(e); TODO is this called twice
         }
     }
 
-    private void uploadVocabulary(Semaphore requestLimiter, ActionListener listener) throws URISyntaxException,
+    private void uploadVocabulary(Semaphore requestLimiter, RefCountingListener listener) throws URISyntaxException,
         InterruptedException {
         ModelLoaderUtils.VocabularyParts vocabularyParts = ModelLoaderUtils.loadVocabulary(
             ModelLoaderUtils.resolvePackageLocation(config.getModelRepository(), config.getVocabularyFile())
@@ -178,13 +177,15 @@ private void ex
         ActionType action,
         Request request,
         Semaphore requestLimiter,
-        ActionListener listener
+        RefCountingListener listener
     ) throws InterruptedException {
         if (task.isCancelled()) {
             throw new TaskCancelledException(format("task cancelled with reason [%s]", task.getReasonCancelled()));
         }
 
         requestLimiter.acquire();
-        client.execute(action, request, listener);
+        client.execute(action, request, listener.acquire(response -> requestLimiter.release())
+            .delegateResponse((l, e) -> { requestLimiter.release(); l.onFailure(e);})
+        );
     }
 }
diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java
new file mode 100644
index 0000000000000..7e7f730a2559d
--- /dev/null
+++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.ml.packageloader.action;
+
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.concurrent.Semaphore;
+
+public class ModelImporterTests extends ESTestCase {
+
+    public void testAcquire() throws InterruptedException {
+        var s = new Semaphore(5);
+
+        s.acquire();
+        s.acquire();
+        s.acquire();
+        assertEquals(2, s.availablePermits());
+    }
+}

diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java
index e8a0757af1581..99b0a736434f8 100644
--- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java
+++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java
@@ -63,49 +63,49 @@ public void testSendsFinishedUploadNotification() {
         assertThat(notificationArg.getValue().getMessage(), CoreMatchers.containsString("finished model import after"));
     }
 
-    public void testSendsErrorNotificationForInternalError() throws URISyntaxException, IOException {
+    public void testSendsErrorNotificationForInternalError() throws Exception {
         ElasticsearchStatusException exception = new ElasticsearchStatusException("exception", RestStatus.INTERNAL_SERVER_ERROR);
         String message = format("Model importing failed due to [%s]", exception.toString());
 
         assertUploadCallsOnFailure(exception, message, Level.ERROR);
     }
 
-    public void testSendsErrorNotificationForMalformedURL() throws URISyntaxException, IOException {
+    public void testSendsErrorNotificationForMalformedURL() throws Exception {
         MalformedURLException exception = new MalformedURLException("exception");
         String message = format(MODEL_IMPORT_FAILURE_MSG_FORMAT, "an invalid URL", exception.toString());
 
         assertUploadCallsOnFailure(exception, message, RestStatus.INTERNAL_SERVER_ERROR, Level.ERROR);
     }
 
-    public void testSendsErrorNotificationForURISyntax() throws URISyntaxException, IOException {
+    public void testSendsErrorNotificationForURISyntax() throws Exception {
         URISyntaxException exception = mock(URISyntaxException.class);
         String message = format(MODEL_IMPORT_FAILURE_MSG_FORMAT, "an invalid URL syntax", exception.toString());
 
         assertUploadCallsOnFailure(exception, message, RestStatus.INTERNAL_SERVER_ERROR, Level.ERROR);
     }
 
-    public void testSendsErrorNotificationForIOException() throws URISyntaxException, IOException {
+    public void testSendsErrorNotificationForIOException() throws Exception {
         IOException exception = mock(IOException.class);
         String message = format(MODEL_IMPORT_FAILURE_MSG_FORMAT, "an IOException", exception.toString());
 
         assertUploadCallsOnFailure(exception, message, RestStatus.SERVICE_UNAVAILABLE, Level.ERROR);
     }
 
-    public void testSendsErrorNotificationForException() throws URISyntaxException, IOException {
+    public void testSendsErrorNotificationForException() throws Exception {
         RuntimeException exception = mock(RuntimeException.class);
         String message = format(MODEL_IMPORT_FAILURE_MSG_FORMAT, "an Exception", exception.toString());
 
         assertUploadCallsOnFailure(exception, message, RestStatus.INTERNAL_SERVER_ERROR, Level.ERROR);
     }
 
-    public void testSendsWarningNotificationForTaskCancelledException() throws URISyntaxException, IOException {
+    public void testSendsWarningNotificationForTaskCancelledException() throws Exception {
         TaskCancelledException exception = new TaskCancelledException("cancelled");
         String message = format("Model importing failed due to [%s]", exception.toString());
 
         assertUploadCallsOnFailure(exception, message, Level.WARNING);
     }
 
-    public void testCallsOnResponseWithAcknowledgedResponse() throws URISyntaxException, IOException {
+    public void testCallsOnResponseWithAcknowledgedResponse() throws Exception {
         var client = mock(Client.class);
         var taskManager = mock(TaskManager.class);
         var task = mock(Task.class);
@@ -134,15 +134,13 @@ public void testDoesNotCallListenerWhenNotWaitingForCompletion() {
         );
     }
 
-    private void assertUploadCallsOnFailure(Exception exception, String message, RestStatus status, Level level) throws URISyntaxException,
-        IOException {
+    private void assertUploadCallsOnFailure(Exception exception, String message, RestStatus status, Level level) throws Exception {
         var esStatusException = new ElasticsearchStatusException(message, status, exception);
 
         assertNotificationAndOnFailure(exception, esStatusException, message, level);
     }
 
-    private void assertUploadCallsOnFailure(ElasticsearchException exception, String message, Level level) throws URISyntaxException,
-        IOException {
+    private void assertUploadCallsOnFailure(ElasticsearchException exception, String message, Level level) throws Exception {
         assertNotificationAndOnFailure(exception, exception, message, level);
     }
 
@@ -151,7 +149,7 @@ private void assertNotificationAndOnFailure(
         ElasticsearchException onFailureException,
         String message,
         Level level
-    ) throws URISyntaxException, IOException {
+    ) throws Exception {
         var client = mock(Client.class);
         var taskManager = mock(TaskManager.class);
         var task = mock(Task.class);
@@ -179,14 +177,10 @@ private void assertNotificationAndOnFailure(
         verify(taskManager).unregister(task);
     }
 
-    private ModelImporter createUploader(Exception exception) throws URISyntaxException, IOException {
+    private ModelImporter createUploader(Exception exception) {
         ModelImporter uploader = mock(ModelImporter.class);
         if (exception != null) {
-            try {
-                doThrow(exception).when(uploader).doImport();
-            } catch (InterruptedException e) {
-                throw new RuntimeException(e);
-            }
+            doThrow(exception).when(uploader).doImport();
         }
 
         return uploader;
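The interesting detail in this WIP commit is how the permit gets back to the semaphore once the listener is a RefCountingListener child: the acquired child releases on the response path, and delegateResponse adds the same release on the failure path before forwarding the error. A hypothetical sketch of that "return the permit whatever the outcome" decorator using CompletableFuture instead of the ES listener API:

    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.Semaphore;

    public class ReleasingSketch {
        // whatever the outcome of the request future, the permit goes back
        // before the result (or the error) is propagated further
        static <T> CompletableFuture<T> releasing(Semaphore permits, CompletableFuture<T> request) {
            return request.whenComplete((r, e) -> permits.release());
        }

        public static void main(String[] args) throws Exception {
            Semaphore permits = new Semaphore(3);
            permits.acquire(); // taken before dispatching the request
            CompletableFuture<String> request = new CompletableFuture<>();
            releasing(permits, request)
                .whenComplete((r, e) -> System.out.println(r + ", permits=" + permits.availablePermits()));
            request.complete("part stored"); // prints: part stored, permits=3
        }
    }

Releasing on exactly one path is the classic way such limiters deadlock; doing it symmetrically is the whole point of the decorator.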
From 3a57e14f0371d6adfd5ab79c5a3b2018c8fc7db2 Mon Sep 17 00:00:00 2001
From: David Kyle
Date: Wed, 14 Aug 2024 13:16:09 +0100
Subject: [PATCH 05/58] use ref counting listener

---
 .../packageloader/action/ModelImporter.java   | 199 ++++++++----------
 .../TransportLoadTrainedModelPackage.java     |   1 -
 ...TransportLoadTrainedModelPackageTests.java |  21 +-
 3 files changed, 103 insertions(+), 118 deletions(-)

diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
index 2b28e98658499..626c0f01df9ea 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
@@ -11,9 +11,6 @@
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.ActionRequest;
-import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.action.ActionType;
 import org.elasticsearch.action.support.RefCountingListener;
 import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.client.internal.Client;
@@ -30,7 +27,6 @@
 import java.net.URISyntaxException;
 import java.util.Objects;
 import java.util.concurrent.Semaphore;
-import java.util.concurrent.atomic.AtomicReference;
 
 import static org.elasticsearch.core.Strings.format;
 
@@ -55,113 +51,69 @@ class ModelImporter {
     public void doImport(ActionListener finalListener) {
         long size = config.getSize();
 
+        // simple round up
+        int totalParts = (int) ((size + DEFAULT_CHUNK_SIZE - 1) / DEFAULT_CHUNK_SIZE);
+        InputStream modelInputStream;
 
-        var firstError = new AtomicReference();
-        var requestLimiter = new Semaphore(MAX_IN_FLIGHT_REQUESTS);
-
-        try (var countingListener = new RefCountingListener(1, finalListener.map(ignored -> AcknowledgedResponse.TRUE))) {
-            var releasingListener = ActionListener.wrap(r -> requestLimiter.release(), e -> {
-                requestLimiter.release();
-                firstError.compareAndSet(null, e);
-            });
-
-            // Uploading other artefacts of the model first, that way the model is last and a simple search can be used to check if the
-            // download is complete
-            if (Strings.isNullOrEmpty(config.getVocabularyFile()) == false) {
-                uploadVocabulary(requestLimiter, countingListener);
-
-                logger.debug(() -> format("[%s] imported model vocabulary [%s]", modelId, config.getVocabularyFile()));
-            }
-
+        try {
             URI uri = ModelLoaderUtils.resolvePackageLocation(
                 config.getModelRepository(),
                 config.getPackagedModelId() + ModelLoaderUtils.MODEL_FILE_EXTENSION
             );
+            modelInputStream = ModelLoaderUtils.getInputStreamFromModelRepository(uri);
+        } catch (Exception e) {
+            finalListener.onFailure(e);
+            return;
+        }
 
-            InputStream modelInputStream = ModelLoaderUtils.getInputStreamFromModelRepository(uri);
-
-            ModelLoaderUtils.InputStreamChunker chunkIterator = new ModelLoaderUtils.InputStreamChunker(
-                modelInputStream,
-                DEFAULT_CHUNK_SIZE
-            );
-
-            // simple round up
-            int totalParts = (int) ((size + DEFAULT_CHUNK_SIZE - 1) / DEFAULT_CHUNK_SIZE);
-
-            for (int part = 0; part < totalParts - 1; ++part) {
-                task.setProgress(totalParts, part);
-                BytesArray definition = chunkIterator.next();
-
-                PutTrainedModelDefinitionPartAction.Request modelPartRequest = new PutTrainedModelDefinitionPartAction.Request(
-                    modelId,
-                    definition,
-                    part,
-                    size,
-                    totalParts,
-                    true
-                );
-
-                executeRequestIfNotCancelled(
-                    PutTrainedModelDefinitionPartAction.INSTANCE,
-                    modelPartRequest,
-                    requestLimiter,
-                    countingListener
-                );
-            }
-
-            // get the last part, this time verify the checksum and size
-            BytesArray definition = chunkIterator.next();
+        ModelLoaderUtils.InputStreamChunker chunkIterator = new ModelLoaderUtils.InputStreamChunker(modelInputStream, DEFAULT_CHUNK_SIZE);
 
-            if (config.getSha256().equals(chunkIterator.getSha256()) == false) {
-                String message = format(
-                    "Model sha256 checksums do not match, expected [%s] but got [%s]",
-                    config.getSha256(),
-                    chunkIterator.getSha256()
-                );
-
-                throw new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR);
-            }
+        var requestLimiter = new Semaphore(MAX_IN_FLIGHT_REQUESTS);
 
-            if (config.getSize() != chunkIterator.getTotalBytesRead()) {
-                String message = format(
-                    "Model size does not match, expected [%d] but got [%d]",
-                    config.getSize(),
-                    chunkIterator.getTotalBytesRead()
+        try (var countingListener = new RefCountingListener(1, finalListener.map(ignored -> {
+            checkComplete(chunkIterator, totalParts);
+            return AcknowledgedResponse.TRUE;
+        }))) {
+            try {
+                ModelLoaderUtils.VocabularyParts vocabularyParts = ModelLoaderUtils.loadVocabulary(
+                    ModelLoaderUtils.resolvePackageLocation(config.getModelRepository(), config.getVocabularyFile())
                 );
 
-                throw new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR);
+                // Uploading other artefacts of the model first, that way the model is last and a simple search can be used to check if the
+                // download is complete
+                if (Strings.isNullOrEmpty(config.getVocabularyFile()) == false) {
+                    requestLimiter.acquire();
+                    uploadVocabulary(vocabularyParts, countingListener.acquire(r -> {
+                        requestLimiter.release();
+                        logger.debug(() -> format("[%s] imported model vocabulary [%s]", modelId, config.getVocabularyFile()));
+                    }));
+                }
+
+                for (int part = 0; part < totalParts; ++part) {
+                    task.setProgress(totalParts, part);
+                    BytesArray definition = chunkIterator.next();
+
+                    if (countingListener.isFailing()) {
+                        logger.warn("listener is failing");
+                        break;
+
+                    }
+
+                    if (task.isCancelled()) {
+                        throw new TaskCancelledException(format("task cancelled with reason [%s]", task.getReasonCancelled())); // TODO ??
+                    }
+
+                    requestLimiter.acquire();
+                    uploadPart(part, totalParts, size, definition, countingListener.acquire(r -> requestLimiter.release()));
+                }
+            } catch (Exception e) {
+                countingListener.acquire().onFailure(e);
             }
         }
     }
 
-    private void uploadVocabulary(Semaphore requestLimiter, RefCountingListener listener) throws URISyntaxException,
-        InterruptedException {
-        ModelLoaderUtils.VocabularyParts vocabularyParts = ModelLoaderUtils.loadVocabulary(
-            ModelLoaderUtils.resolvePackageLocation(config.getModelRepository(), config.getVocabularyFile())
-        );
-
+    private void uploadVocabulary(ModelLoaderUtils.VocabularyParts vocabularyParts, ActionListener listener)
+        throws URISyntaxException {
         PutTrainedModelVocabularyAction.Request request = new PutTrainedModelVocabularyAction.Request(
             modelId,
             vocabularyParts.vocab(),
@@ -170,22 +122,49 @@ private void uploadVocabulary(Semaphore requestLimiter, RefCountingListener list
             true
         );
 
-        executeRequestIfNotCancelled(PutTrainedModelVocabularyAction.INSTANCE, request, requestLimiter, listener);
+        client.execute(PutTrainedModelVocabularyAction.INSTANCE, request, listener);
     }
 
-    private void executeRequestIfNotCancelled(
-        ActionType action,
-        Request request,
-        Semaphore requestLimiter,
-        RefCountingListener listener
-    ) throws InterruptedException {
-        if (task.isCancelled()) {
-            throw new TaskCancelledException(format("task cancelled with reason [%s]", task.getReasonCancelled()));
+    private void uploadPart(
+        int partIndex,
+        int totalParts,
+        long totalSize,
+        BytesArray bytes,
+        ActionListener listener
+    ) {
+        PutTrainedModelDefinitionPartAction.Request modelPartRequest = new PutTrainedModelDefinitionPartAction.Request(
+            modelId,
+            bytes,
+            partIndex,
+            totalSize,
+            totalParts,
+            true
+        );
+
+        client.execute(PutTrainedModelDefinitionPartAction.INSTANCE, modelPartRequest, listener);
+    }
+
+    private void checkComplete(ModelLoaderUtils.InputStreamChunker chunkIterator, int totalParts) {
+        if (config.getSha256().equals(chunkIterator.getSha256()) == false) {
+            String message = format(
+                "Model sha256 checksums do not match, expected [%s] but got [%s]",
+                config.getSha256(),
+                chunkIterator.getSha256()
+            );
+
+            throw new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR);
         }
 
-        requestLimiter.acquire();
-        client.execute(action, request, listener.acquire(response -> requestLimiter.release())
-            .delegateResponse((l, e) -> { requestLimiter.release(); l.onFailure(e);})
-        );
+        if (config.getSize() != chunkIterator.getTotalBytesRead()) {
+            String message = format(
+                "Model size does not match, expected [%d] but got [%d]",
+                config.getSize(),
+                chunkIterator.getTotalBytesRead()
+            );
+
+            throw new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR);
+        }
+
+        logger.debug(format("finished importing model [%s] using [%d] parts", modelId, totalParts));
     }
 }

diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java
b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java
index d62687aa20b21..c7cf1dcefc8ae 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java
@@ -44,7 +44,6 @@
 import java.net.URISyntaxException;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicReference;
 
 import static org.elasticsearch.core.Strings.format;
 import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN;

diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java
index 99b0a736434f8..cbcfd5b760779 100644
--- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java
+++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java
@@ -33,7 +33,7 @@
 import static org.hamcrest.core.Is.is;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.eq;
-import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
@@ -42,7 +42,7 @@ public class TransportLoadTrainedModelPackageTests extends ESTestCase {
     private static final String MODEL_IMPORT_FAILURE_MSG_FORMAT = "Model importing failed due to %s [%s]";
 
     public void testSendsFinishedUploadNotification() {
-        var uploader = mock(ModelImporter.class);
+        var uploader = createUploader(null);
         var taskManager = mock(TaskManager.class);
         var task = mock(Task.class);
         var client = mock(Client.class);
@@ -74,14 +74,14 @@ public void testSendsErrorNotificationForMalformedURL() throws Exception {
         MalformedURLException exception = new MalformedURLException("exception");
         String message = format(MODEL_IMPORT_FAILURE_MSG_FORMAT, "an invalid URL", exception.toString());
 
-        assertUploadCallsOnFailure(exception, message, RestStatus.INTERNAL_SERVER_ERROR, Level.ERROR);
+        assertUploadCallsOnFailure(exception, message, RestStatus.BAD_REQUEST, Level.ERROR);
     }
 
     public void testSendsErrorNotificationForURISyntax() throws Exception {
         URISyntaxException exception = mock(URISyntaxException.class);
         String message = format(MODEL_IMPORT_FAILURE_MSG_FORMAT, "an invalid URL syntax", exception.toString());
 
-        assertUploadCallsOnFailure(exception, message, RestStatus.INTERNAL_SERVER_ERROR, Level.ERROR);
+        assertUploadCallsOnFailure(exception, message, RestStatus.BAD_REQUEST, Level.ERROR);
     }
 
@@ -177,11 +177,18 @@ private void assertNotificationAndOnFailure(
         verify(taskManager).unregister(task);
     }
 
+    @SuppressWarnings("unchecked")
     private ModelImporter createUploader(Exception exception) {
         ModelImporter uploader = mock(ModelImporter.class);
-        if (exception != null) {
-            doThrow(exception).when(uploader).doImport();
-        }
+        doAnswer(invocation -> {
+            ActionListener listener = (ActionListener) invocation.getArguments()[0];
+            if (exception != null) {
+                listener.onFailure(exception);
+            } else {
+                listener.onResponse(AcknowledgedResponse.TRUE);
+            }
+            return null;
+        }).when(uploader).doImport(any(ActionListener.class));
 
         return uploader;
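Once doImport takes the listener, the tests can no longer simulate failures with doThrow; the doAnswer stub above completes the listener by hand instead. A hedged sketch of how such a stub drives an assertion (the variable names and test body here are illustrative; doAnswer, ActionListener.wrap and the hamcrest matchers are the real APIs already used in this file):

    // given: var uploader = createUploader(new ElasticsearchStatusException("boom", RestStatus.INTERNAL_SERVER_ERROR));
    var failure = new AtomicReference<Exception>();
    uploader.doImport(ActionListener.wrap(r -> fail("expected a failure"), failure::set));
    assertThat(failure.get().getMessage(), containsString("boom"));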
From 56f5e1c6a5d5cfd4ecc74bff0f2e0ddd1dd77a84 Mon Sep 17 00:00:00 2001
From: David Kyle
Date: Wed, 14 Aug 2024 13:18:09 +0100
Subject: [PATCH 06/58] tidying

---
 .../xpack/ml/packageloader/action/ModelImporter.java | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
index 626c0f01df9ea..3196aa49cecff 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
@@ -94,13 +94,11 @@ public void doImport(ActionListener finalListener) {
             BytesArray definition = chunkIterator.next();
 
             if (countingListener.isFailing()) {
-                logger.warn("listener is failing");
                 break;
-
             }
 
             if (task.isCancelled()) {
-                throw new TaskCancelledException(format("task cancelled with reason [%s]", task.getReasonCancelled())); // TODO ??
+                throw new TaskCancelledException(format("task cancelled with reason [%s]", task.getReasonCancelled()));
             }
 
             requestLimiter.acquire();
@@ -112,8 +110,7 @@ public void doImport(ActionListener finalListener) {
         }
     }
 
-    private void uploadVocabulary(ModelLoaderUtils.VocabularyParts vocabularyParts, ActionListener listener)
-        throws URISyntaxException {
+    private void uploadVocabulary(ModelLoaderUtils.VocabularyParts vocabularyParts, ActionListener listener) {
         PutTrainedModelVocabularyAction.Request request = new PutTrainedModelVocabularyAction.Request(
             modelId,
             vocabularyParts.vocab(),
From 9f98dbefb52368a4166824c4d1f7c94c97c0f658 Mon Sep 17 00:00:00 2001
From: David Kyle
Date: Wed, 14 Aug 2024 16:24:58 +0100
Subject: [PATCH 07/58] add tests

---
 .../packageloader/action/ModelImporter.java   |  42 ++--
 .../action/ModelLoaderUtils.java              |   4 +-
 .../action/ModelImporterTests.java            | 214 +++++++++++++++++-
 3 files changed, 238 insertions(+), 22 deletions(-)

diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
index 3196aa49cecff..badca5cce67bc 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
@@ -14,8 +14,8 @@
 import org.elasticsearch.action.support.RefCountingListener;
 import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.client.internal.Client;
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.core.Nullable;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.tasks.TaskCancelledException;
 import org.elasticsearch.xpack.core.ml.action.PutTrainedModelDefinitionPartAction;
@@ -24,7 +24,6 @@
 import java.io.InputStream;
 import java.net.URI;
-import java.net.URISyntaxException;
 import java.util.Objects;
 import java.util.concurrent.Semaphore;
 
@@ -55,6 +53,7 @@ public void doImport(ActionListener finalListener) {
         // simple round up
         int totalParts = (int) ((size + DEFAULT_CHUNK_SIZE - 1) / DEFAULT_CHUNK_SIZE);
         InputStream modelInputStream;
+        ModelLoaderUtils.VocabularyParts vocabularyParts = null;
 
         try {
             URI uri = ModelLoaderUtils.resolvePackageLocation(
@@ -61,27 +61,43 @@
                 config.getPackagedModelId() + ModelLoaderUtils.MODEL_FILE_EXTENSION
             );
             modelInputStream = ModelLoaderUtils.getInputStreamFromModelRepository(uri);
+
+            if (config.getVocabularyFile() != null) {
+                vocabularyParts = ModelLoaderUtils.loadVocabulary(
+                    ModelLoaderUtils.resolvePackageLocation(config.getModelRepository(), config.getVocabularyFile())
+                );
+            }
         } catch (Exception e) {
             finalListener.onFailure(e);
             return;
         }
 
-        ModelLoaderUtils.InputStreamChunker chunkIterator = new ModelLoaderUtils.InputStreamChunker(modelInputStream, DEFAULT_CHUNK_SIZE);
+        downloadParts(
+            new ModelLoaderUtils.InputStreamChunker(modelInputStream, DEFAULT_CHUNK_SIZE),
+            totalParts,
+            size,
+            vocabularyParts,
+            finalListener
+        );
+    }
 
+    void downloadParts(
+        ModelLoaderUtils.InputStreamChunker chunkIterator,
+        int totalParts,
+        long size,
+        @Nullable ModelLoaderUtils.VocabularyParts vocabularyParts,
+        ActionListener finalListener
+    ) {
         var requestLimiter = new Semaphore(MAX_IN_FLIGHT_REQUESTS);
 
         try (var countingListener = new RefCountingListener(1, finalListener.map(ignored -> {
-            checkComplete(chunkIterator, totalParts);
+            checkDownloadComplete(chunkIterator, totalParts);
             return AcknowledgedResponse.TRUE;
         }))) {
             try {
-                ModelLoaderUtils.VocabularyParts vocabularyParts = ModelLoaderUtils.loadVocabulary(
-                    ModelLoaderUtils.resolvePackageLocation(config.getModelRepository(), config.getVocabularyFile())
-                );
-
                 // Uploading other artefacts of the model first, that way the model is last and a simple search can be used to check if the
                 // download is complete
-                if (Strings.isNullOrEmpty(config.getVocabularyFile()) == false) {
+                if (vocabularyParts != null) {
                     requestLimiter.acquire();
                     uploadVocabulary(vocabularyParts, countingListener.acquire(r -> {
                         requestLimiter.release();
@@ -90,13 +106,13 @@
                 }
 
                 for (int part = 0; part < totalParts; ++part) {
-                    task.setProgress(totalParts, part);
-                    BytesArray definition = chunkIterator.next();
-
                     if (countingListener.isFailing()) {
                         break;
                     }
 
+                    task.setProgress(totalParts, part);
+                    BytesArray definition = chunkIterator.next();
+
                     if (task.isCancelled()) {
                         throw new TaskCancelledException(format("task cancelled with reason [%s]", task.getReasonCancelled()));
                     }
@@ -141,7 +157,7 @@
         client.execute(PutTrainedModelDefinitionPartAction.INSTANCE, modelPartRequest, listener);
     }
 
-    private void checkComplete(ModelLoaderUtils.InputStreamChunker chunkIterator, int totalParts) {
+    private void checkDownloadComplete(ModelLoaderUtils.InputStreamChunker chunkIterator, int totalParts) {
         if (config.getSha256().equals(chunkIterator.getSha256()) == false) {
             String message = format(
                 "Model sha256 checksums do not match, expected [%s] but got [%s]",
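The round-up arithmetic that keeps appearing as context in these hunks is integer ceiling division: (size + chunk - 1) / chunk computes how many chunk-sized parts cover size bytes without floating point. A worked example:

    public class CeilDivDemo {
        public static void main(String[] args) {
            int chunk = 1024 * 1024;                          // 1 MiB parts, like DEFAULT_CHUNK_SIZE
            long size = 5L * chunk + 1;                       // 5 MiB plus one byte
            int totalParts = (int) ((size + chunk - 1) / chunk);
            System.out.println(totalParts);                   // 6: five full parts plus one 1-byte part
        }
    }

Adding chunk - 1 before the truncating division bumps any partial final chunk up to a whole part while leaving exact multiples unchanged.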
diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java
index 2f3f9cbf3f32c..bf17a19d9daef 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java
@@ -67,7 +67,7 @@ static class InputStreamChunker {
         private final MessageDigest digestSha256 = MessageDigests.sha256();
         private final int chunkSize;
 
-        private int totalBytesRead = 0;
+        private long totalBytesRead = 0;
 
         InputStreamChunker(InputStream inputStream, int chunkSize) {
             this.inputStream = inputStream;
@@ -96,7 +96,7 @@ public String getSha256() {
             return MessageDigests.toHexString(digestSha256.digest());
         }
 
-        public int getTotalBytesRead() {
+        public long getTotalBytesRead() {
             return totalBytesRead;
         }
     }
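Widening totalBytesRead from int to long matters because an int byte counter tops out at Integer.MAX_VALUE, roughly 2 GiB; presumably packaged models can exceed that. A two-line demonstration of the silent wraparound the change avoids:

    public class OverflowDemo {
        public static void main(String[] args) {
            int intBytes = Integer.MAX_VALUE;   // ~2 GiB is the ceiling for an int counter
            intBytes += 1;
            System.out.println(intBytes);       // -2147483648: silent wraparound

            long longBytes = Integer.MAX_VALUE;
            longBytes += 1;
            System.out.println(longBytes);      // 2147483648: correct for multi-GiB downloads
        }
    }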
diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java
index 7e7f730a2559d..cb83483a62a4b 100644
--- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java
+++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java
@@ -7,18 +7,218 @@
 
 package org.elasticsearch.xpack.ml.packageloader.action;
 
+import org.elasticsearch.ElasticsearchStatusException;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
+import org.elasticsearch.client.internal.Client;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.hash.MessageDigests;
+import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.core.ml.action.PutTrainedModelDefinitionPartAction;
+import org.elasticsearch.xpack.core.ml.action.PutTrainedModelVocabularyAction;
+import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ModelPackageConfig;
 
-import java.util.concurrent.Semaphore;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicReference;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
 public class ModelImporterTests extends ESTestCase {
 
-    public void testAcquire() throws InterruptedException {
-        var s = new Semaphore(5);
+    public void testDownload() throws IOException {
+        var client = mockClient(false);
+        var task = mock(ModelDownloadTask.class);
+        var config = mock(ModelPackageConfig.class);
+        var vocab = new ModelLoaderUtils.VocabularyParts(List.of(), List.of(), List.of());
+
+        var importer = new ModelImporter(client, "foo", config, task);
+        int totalParts = 5;
+        var modelDef = modelDefinition(totalParts);
+        long size = modelDef.stream().mapToInt(BytesArray::length).sum();
+        var stream = mockStreamChunker(modelDef);
+
+        var digest = computeDigest(modelDef);
+        when(stream.getSha256()).thenReturn(digest);
+        when(config.getSha256()).thenReturn(digest);
+        when(config.getSize()).thenReturn(size);
+        when(stream.getTotalBytesRead()).thenReturn(size);
+
+        importer.downloadParts(stream, totalParts, size, vocab, ActionListener.wrap(r -> { ; }, ESTestCase::fail));
+
+        verify(client, times(totalParts)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any());
+    }
+
+    public void testSizeMismatch() throws IOException {
+        var client = mockClient(false);
+        var task = mock(ModelDownloadTask.class);
+        var config = mock(ModelPackageConfig.class);
+
+        var importer = new ModelImporter(client, "foo", config, task);
+        int totalParts = 5;
+        var modelDef = modelDefinition(totalParts);
+        long size = modelDef.stream().mapToInt(BytesArray::length).sum();
+        var stream = mockStreamChunker(modelDef);
+
+        var digest = computeDigest(modelDef);
+        when(stream.getSha256()).thenReturn(digest);
+        when(config.getSha256()).thenReturn(digest);
+        when(config.getSize()).thenReturn(size);
+        when(stream.getTotalBytesRead()).thenReturn(size - 1); // expected size and read size are different
+
+        var exceptionHolder = new AtomicReference();
+
+        importer.downloadParts(stream, totalParts, size, null, ActionListener.wrap(ignore -> {}, exceptionHolder::set));
+
+        assertThat(exceptionHolder.get().getMessage(), containsString("Model size does not match"));
+        verify(client, times(totalParts)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any());
+    }
+
+    public void testDigestMismatch() throws IOException {
+        var client = mockClient(false);
+        var task = mock(ModelDownloadTask.class);
+        var config = mock(ModelPackageConfig.class);
+
+        var importer = new ModelImporter(client, "foo", config, task);
+        int totalParts = 5;
+        var modelDef = modelDefinition(totalParts);
+        long size = modelDef.stream().mapToInt(BytesArray::length).sum();
+        var stream = mockStreamChunker(modelDef);
+
+        var digest = computeDigest(modelDef);
+        when(stream.getSha256()).thenReturn(digest);
+        when(config.getSha256()).thenReturn("0x"); // digest is different
+        when(config.getSize()).thenReturn(size);
+        when(stream.getTotalBytesRead()).thenReturn(size);
+
+        var exceptionHolder = new AtomicReference();
+
+        importer.downloadParts(stream, totalParts, size, null, ActionListener.wrap(ignore -> {}, exceptionHolder::set));
+
+        assertThat(exceptionHolder.get().getMessage(), containsString("Model sha256 checksums do not match"));
+        verify(client, times(totalParts)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any());
+    }
+
+    public void testPutFailure() throws IOException {
+        var client = mockClient(true);
+        var task = mock(ModelDownloadTask.class);
+        var config = mock(ModelPackageConfig.class);
+
+        var importer = new ModelImporter(client, "foo", config, task);
+        int totalParts = 4;
+        var modelDef = modelDefinition(totalParts);
+        int size = modelDef.stream().mapToInt(BytesArray::length).sum();
+        var stream = mockStreamChunker(modelDef);
+
+        var exceptionHolder = new AtomicReference();
+
+        importer.downloadParts(stream, totalParts, size, null, ActionListener.wrap(r -> fail("unexpected response"), exceptionHolder::set));
+
+        assertThat(exceptionHolder.get().getMessage(), containsString("put model part failed"));
+        verify(client, times(1)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any());
+    }
+
+    public void testReadFailure() throws IOException {
+        var client = mockClient(true);
+        var task = mock(ModelDownloadTask.class);
+        var config = mock(ModelPackageConfig.class);
+
+        var importer = new ModelImporter(client, "foo", config, task);
+        int totalParts = 4;
+        var stream = mock(ModelLoaderUtils.InputStreamChunker.class);
+        when(stream.next()).thenThrow(new IOException("stream failed"));
+
+        var exceptionHolder = new AtomicReference();
+
+        importer.downloadParts(stream, totalParts, 1L, null, ActionListener.wrap(r -> fail("unexpected response"), exceptionHolder::set));
+    public void testPutFailure() throws IOException {
+        var client = mockClient(true);
+        var task = mock(ModelDownloadTask.class);
+        var config = mock(ModelPackageConfig.class);
+
+        var importer = new ModelImporter(client, "foo", config, task);
+        int totalParts = 4;
+        var modelDef = modelDefinition(totalParts);
+        int size = modelDef.stream().mapToInt(BytesArray::length).sum();
+        var stream = mockStreamChunker(modelDef);
+
+        var exceptionHolder = new AtomicReference<Exception>();
+
+        importer.downloadParts(stream, totalParts, size, null, ActionListener.wrap(r -> fail("unexpected response"), exceptionHolder::set));
+
+        assertThat(exceptionHolder.get().getMessage(), containsString("put model part failed"));
+        verify(client, times(1)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any());
+    }
+
+    public void testReadFailure() throws IOException {
+        var client = mockClient(true);
+        var task = mock(ModelDownloadTask.class);
+        var config = mock(ModelPackageConfig.class);
+
+        var importer = new ModelImporter(client, "foo", config, task);
+        int totalParts = 4;
+        var stream = mock(ModelLoaderUtils.InputStreamChunker.class);
+        when(stream.next()).thenThrow(new IOException("stream failed"));
+
+        var exceptionHolder = new AtomicReference<Exception>();
+
+        importer.downloadParts(stream, totalParts, 1L, null, ActionListener.wrap(r -> fail("unexpected response"), exceptionHolder::set));
+
+        assertThat(exceptionHolder.get().getMessage(), containsString("stream failed"));
+    }
+
+    @SuppressWarnings("unchecked")
+    public void testUploadVocabFailure() {
+        var client = mock(Client.class);
+        doAnswer(invocation -> {
+            ActionListener<AcknowledgedResponse> listener = (ActionListener<AcknowledgedResponse>) invocation.getArguments()[2];
+            listener.onFailure(new ElasticsearchStatusException("put vocab failed", RestStatus.BAD_REQUEST));
+            return null;
+        }).when(client).execute(eq(PutTrainedModelVocabularyAction.INSTANCE), any(), any());
+
+        var task = mock(ModelDownloadTask.class);
+        var config = mock(ModelPackageConfig.class);
+
+        var vocab = new ModelLoaderUtils.VocabularyParts(List.of(), List.of(), List.of());
+
+        var importer = new ModelImporter(client, "foo", config, task);
+        int totalParts = 4;
+        var stream = mock(ModelLoaderUtils.InputStreamChunker.class);
+        var exceptionHolder = new AtomicReference<Exception>();
+
+        importer.downloadParts(stream, totalParts, 1L, vocab, ActionListener.wrap(r -> fail("put vocab failed"), exceptionHolder::set));
+
+        assertThat(exceptionHolder.get().getMessage(), containsString("put vocab failed"));
+        verify(client, times(1)).execute(eq(PutTrainedModelVocabularyAction.INSTANCE), any(), any());
+        verify(client, never()).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any());
+    }
+
+    private ModelLoaderUtils.InputStreamChunker mockStreamChunker(List<BytesArray> modelDef) throws IOException {
+        var streamChunker = mock(ModelLoaderUtils.InputStreamChunker.class);
+
+        var first = modelDef.get(0);
+        var others = new BytesArray[modelDef.size() - 1];
+        for (int i = 0; i < modelDef.size() - 1; i++) {
+            others[i] = modelDef.get(i + 1);
+        }
+
+        when(streamChunker.next()).thenReturn(first, others);
+        return streamChunker;
+    }
+
+    private List<BytesArray> modelDefinition(int totalParts) {
+        var parts = new ArrayList<BytesArray>();
+        for (int i = 0; i < totalParts; i++) {
+            parts.add(new BytesArray(randomByteArrayOfLength(4)));
+        }
+        return parts;
+    }
+
+    private String computeDigest(List<BytesArray> parts) {
+        var digest = MessageDigests.sha256();
+        for (var part : parts) {
+            digest.update(part.array());
+        }
+        return MessageDigests.toHexString(digest.digest());
+    }
+
+    @SuppressWarnings("unchecked")
+    private Client mockClient(boolean failPutPart) {
+        var client = mock(Client.class);
+        doAnswer(invocation -> {
+            ActionListener<AcknowledgedResponse> listener = (ActionListener<AcknowledgedResponse>) invocation.getArguments()[2];
+            if (failPutPart) {
+                listener.onFailure(new IllegalStateException("put model part failed"));
+            } else {
+                listener.onResponse(AcknowledgedResponse.TRUE);
+            }
+            return null;
+        }).when(client).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any());
+
+        doAnswer(invocation -> {
+            ActionListener<AcknowledgedResponse> listener = (ActionListener<AcknowledgedResponse>) invocation.getArguments()[2];
+            listener.onResponse(AcknowledgedResponse.TRUE);
+            return null;
+        }).when(client).execute(eq(PutTrainedModelVocabularyAction.INSTANCE), any(), any());

-        s.acquire();
-        s.acquire();
-        s.acquire();
-        assertEquals(2, s.availablePermits());
+        return client;
     }
 }

From 9d9a0e663a36ab19463c960f172b396121abe668 Mon Sep 17 00:00:00 2001
From: David Kyle
Date: Mon, 19 Aug 2024 17:05:29 +0100
Subject: [PATCH 08/58] Add download threadpool

---
 .../MachineLearningPackageLoader.java         | 26 ++++++++++--
 ...ortGetTrainedModelPackageConfigAction.java |  2 +-
 .../TransportLoadTrainedModelPackage.java     |  5 ++-
 .../action/ModelImporterTests.java            | 42 +++++++++++++------
 4 files changed, 56 insertions(+), 19 deletions(-)

diff --git 
a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java index e927c46e6bd29..8f28721286ce4 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java @@ -15,9 +15,13 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ExecutorBuilder; +import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.xpack.core.ml.packageloader.action.GetTrainedModelPackageConfigAction; import org.elasticsearch.xpack.core.ml.packageloader.action.LoadTrainedModelPackageAction; import org.elasticsearch.xpack.ml.packageloader.action.ModelDownloadTask; @@ -44,9 +48,6 @@ public class MachineLearningPackageLoader extends Plugin implements ActionPlugin Setting.Property.Dynamic ); - // re-using thread pool setup by the ml plugin - public static final String UTILITY_THREAD_POOL_NAME = "ml_utility"; - // This link will be invalid for serverless, but serverless will never be // air-gapped, so this message should never be needed. private static final String MODEL_REPOSITORY_DOCUMENTATION_LINK = format( @@ -54,6 +55,8 @@ public class MachineLearningPackageLoader extends Plugin implements ActionPlugin Build.current().version().replaceFirst("^(\\d+\\.\\d+).*", "$1") ); + public static final String MODEL_DOWNLOAD_THREADPOOL_NAME = "model_download"; + public MachineLearningPackageLoader() {} @Override @@ -81,6 +84,23 @@ public List getNamedWriteables() { ); } + @Override + public List> getExecutorBuilders(Settings settings) { + return List.of(modelDownloadExecutor(settings)); + } + + public static ExecutorBuilder modelDownloadExecutor(Settings settings) { + // Threadpool with a fixed size of 1 thread + return new FixedExecutorBuilder( + settings, + MODEL_DOWNLOAD_THREADPOOL_NAME, + 1, + 10, + "xpack.ml.model_download_thread_pool", + EsExecutors.TaskTrackingConfig.DO_NOT_TRACK + ); + } + @Override public List getBootstrapChecks() { return List.of(new BootstrapCheck() { diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportGetTrainedModelPackageConfigAction.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportGetTrainedModelPackageConfigAction.java index ba50f2f6a6b74..68f869742d9e5 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportGetTrainedModelPackageConfigAction.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportGetTrainedModelPackageConfigAction.java @@ -77,7 +77,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, A String packagedModelId = request.getPackagedModelId(); logger.debug(() -> format("Fetch package manifest for [%s] from [%s]", packagedModelId, 
repository)); - threadPool.executor(MachineLearningPackageLoader.UTILITY_THREAD_POOL_NAME).execute(() -> { + threadPool.executor(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME).execute(() -> { try { URI uri = ModelLoaderUtils.resolvePackageLocation(repository, packagedModelId + ModelLoaderUtils.METADATA_FILE_EXTENSION); InputStream inputStream = ModelLoaderUtils.getInputStreamFromModelRepository(uri); diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java index 7a7ff0180b15f..2c05bd5a7c1bc 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java @@ -97,12 +97,13 @@ protected void masterOperation(Task task, Request request, ClusterState state, A parentTaskAssigningClient, request.getModelId(), request.getModelPackageConfig(), - downloadTask + downloadTask, + threadPool ); var downloadCompleteListener = request.isWaitForCompletion() ? listener : ActionListener.noop(); - threadPool.executor(MachineLearningPackageLoader.UTILITY_THREAD_POOL_NAME) + threadPool.executor(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME) .execute(() -> importModel(client, taskManager, request, modelImporter, downloadCompleteListener, downloadTask)); } catch (Exception e) { taskManager.unregister(downloadTask); diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java index cb83483a62a4b..29a3734c5a294 100644 --- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java +++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java @@ -13,11 +13,16 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.hash.MessageDigests; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelDefinitionPartAction; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelVocabularyAction; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ModelPackageConfig; +import org.elasticsearch.xpack.ml.packageloader.MachineLearningPackageLoader; +import org.junit.After; +import org.junit.Before; import java.io.IOException; import java.util.ArrayList; @@ -36,13 +41,25 @@ public class ModelImporterTests extends ESTestCase { + private TestThreadPool threadPool; + + @Before + public void setUp() { + threadPool = createThreadPool(MachineLearningPackageLoader.modelDownloadExecutor(Settings.EMPTY)); + } + + @After + public void tearDown() { + threadPool.close(); + } + public void testDownload() throws IOException { var client = mockClient(false); var task = mock(ModelDownloadTask.class); var config = mock(ModelPackageConfig.class); var vocab = new ModelLoaderUtils.VocabularyParts(List.of(), 
List.of(), List.of()); - var importer = new ModelImporter(client, "foo", config, task); + var importer = new ModelImporter(client, "foo", config, task, threadPool); int totalParts = 5; var modelDef = modelDefinition(totalParts); long size = modelDef.stream().mapToInt(BytesArray::length).sum(); @@ -54,7 +71,7 @@ public void testDownload() throws IOException { when(config.getSize()).thenReturn(size); when(stream.getTotalBytesRead()).thenReturn(size); - importer.downloadParts(stream, totalParts, size, vocab, ActionListener.wrap(r -> { ; }, ESTestCase::fail)); + importer.downloadParts(stream, size, vocab, ActionListener.wrap(r -> { ; }, ESTestCase::fail)); verify(client, times(totalParts)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any()); } @@ -64,7 +81,7 @@ public void testSizeMismatch() throws IOException { var task = mock(ModelDownloadTask.class); var config = mock(ModelPackageConfig.class); - var importer = new ModelImporter(client, "foo", config, task); + var importer = new ModelImporter(client, "foo", config, task, threadPool); int totalParts = 5; var modelDef = modelDefinition(totalParts); long size = modelDef.stream().mapToInt(BytesArray::length).sum(); @@ -78,7 +95,7 @@ public void testSizeMismatch() throws IOException { var exceptionHolder = new AtomicReference(); - importer.downloadParts(stream, totalParts, size, null, ActionListener.wrap(ignore -> {}, exceptionHolder::set)); + importer.downloadParts(stream, size, null, ActionListener.wrap(ignore -> {}, exceptionHolder::set)); assertThat(exceptionHolder.get().getMessage(), containsString("Model size does not match")); verify(client, times(totalParts)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any()); @@ -89,7 +106,7 @@ public void testDigestMismatch() throws IOException { var task = mock(ModelDownloadTask.class); var config = mock(ModelPackageConfig.class); - var importer = new ModelImporter(client, "foo", config, task); + var importer = new ModelImporter(client, "foo", config, task, threadPool); int totalParts = 5; var modelDef = modelDefinition(totalParts); long size = modelDef.stream().mapToInt(BytesArray::length).sum(); @@ -103,7 +120,7 @@ public void testDigestMismatch() throws IOException { var exceptionHolder = new AtomicReference(); - importer.downloadParts(stream, totalParts, size, null, ActionListener.wrap(ignore -> {}, exceptionHolder::set)); + importer.downloadParts(stream, size, null, ActionListener.wrap(ignore -> {}, exceptionHolder::set)); assertThat(exceptionHolder.get().getMessage(), containsString("Model sha256 checksums do not match")); verify(client, times(totalParts)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any()); @@ -114,7 +131,7 @@ public void testPutFailure() throws IOException { var task = mock(ModelDownloadTask.class); var config = mock(ModelPackageConfig.class); - var importer = new ModelImporter(client, "foo", config, task); + var importer = new ModelImporter(client, "foo", config, task, threadPool); int totalParts = 4; var modelDef = modelDefinition(totalParts); int size = modelDef.stream().mapToInt(BytesArray::length).sum(); @@ -122,7 +139,7 @@ public void testPutFailure() throws IOException { var exceptionHolder = new AtomicReference(); - importer.downloadParts(stream, totalParts, size, null, ActionListener.wrap(r -> fail("unexpected response"), exceptionHolder::set)); + importer.downloadParts(stream, totalParts, null, ActionListener.wrap(r -> fail("unexpected response"), exceptionHolder::set)); 
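A note on the call above: these tests assert directly after downloadParts returns, which only stays reliable while the importer finishes (or throttles) its work before returning. Once the download is handed fully to a threadpool later in this series, the tests are reworked around a LatchedActionListener so the asserting thread waits for the listener to fire. The underlying pattern is a plain CountDownLatch; a self-contained sketch with illustrative names, not the actual test code:

    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.atomic.AtomicReference;

    class LatchedAssertionSketch {
        public static void main(String[] args) throws InterruptedException {
            var latch = new CountDownLatch(1);
            var exceptionHolder = new AtomicReference<Exception>();
            ExecutorService executor = Executors.newSingleThreadExecutor();

            executor.execute(() -> {
                try {
                    throw new IllegalStateException("put model part failed");
                } catch (Exception e) {
                    exceptionHolder.set(e); // capture the async failure
                } finally {
                    latch.countDown();      // wake the asserting thread
                }
            });

            latch.await(); // never assert before the async work completes
            System.out.println(exceptionHolder.get().getMessage());
            executor.shutdown();
        }
    }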
assertThat(exceptionHolder.get().getMessage(), containsString("put model part failed")); verify(client, times(1)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any()); @@ -133,14 +150,14 @@ public void testReadFailure() throws IOException { var task = mock(ModelDownloadTask.class); var config = mock(ModelPackageConfig.class); - var importer = new ModelImporter(client, "foo", config, task); + var importer = new ModelImporter(client, "foo", config, task, threadPool); int totalParts = 4; var stream = mock(ModelLoaderUtils.InputStreamChunker.class); when(stream.next()).thenThrow(new IOException("stream failed")); var exceptionHolder = new AtomicReference(); - importer.downloadParts(stream, totalParts, 1L, null, ActionListener.wrap(r -> fail("unexpected response"), exceptionHolder::set)); + importer.downloadParts(stream, 1L, null, ActionListener.wrap(r -> fail("unexpected response"), exceptionHolder::set)); assertThat(exceptionHolder.get().getMessage(), containsString("stream failed")); } @@ -159,12 +176,11 @@ public void testUploadVocabFailure() { var vocab = new ModelLoaderUtils.VocabularyParts(List.of(), List.of(), List.of()); - var importer = new ModelImporter(client, "foo", config, task); - int totalParts = 4; + var importer = new ModelImporter(client, "foo", config, task, threadPool); var stream = mock(ModelLoaderUtils.InputStreamChunker.class); var exceptionHolder = new AtomicReference(); - importer.downloadParts(stream, totalParts, 1L, vocab, ActionListener.wrap(r -> fail("put vocab failed"), exceptionHolder::set)); + importer.downloadParts(stream, 1L, vocab, ActionListener.wrap(r -> fail("put vocab failed"), exceptionHolder::set)); assertThat(exceptionHolder.get().getMessage(), containsString("put vocab failed")); verify(client, times(1)).execute(eq(PutTrainedModelVocabularyAction.INSTANCE), any(), any()); From 669909bdcd5aab0bbdec9bbfc6a6365aec6ffdf7 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 19 Aug 2024 17:06:22 +0100 Subject: [PATCH 09/58] less blocking --- .../packageloader/action/ModelImporter.java | 115 ++++++++++++------ .../action/ModelLoaderUtils.java | 28 ++++- .../action/ModelLoaderUtilsTests.java | 6 +- 3 files changed, 106 insertions(+), 43 deletions(-) diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java index badca5cce67bc..83ec077ac22b2 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java @@ -18,14 +18,16 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelDefinitionPartAction; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelVocabularyAction; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ModelPackageConfig; +import org.elasticsearch.xpack.ml.packageloader.MachineLearningPackageLoader; import java.io.InputStream; import java.net.URI; import java.util.Objects; -import java.util.concurrent.Semaphore; +import java.util.concurrent.ExecutorService; import static org.elasticsearch.core.Strings.format; @@ -40,15 +42,18 @@ class ModelImporter { private 
final String modelId; private final ModelPackageConfig config; private final ModelDownloadTask task; + private final ExecutorService executorService; - ModelImporter(Client client, String modelId, ModelPackageConfig packageConfig, ModelDownloadTask task) { + ModelImporter(Client client, String modelId, ModelPackageConfig packageConfig, ModelDownloadTask task, ThreadPool threadPool) { this.client = client; this.modelId = Objects.requireNonNull(modelId); this.config = Objects.requireNonNull(packageConfig); this.task = Objects.requireNonNull(task); + this.executorService = threadPool.executor(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME); } public void doImport(ActionListener finalListener) { + long size = config.getSize(); // simple round up int totalParts = (int) ((size + DEFAULT_CHUNK_SIZE - 1) / DEFAULT_CHUNK_SIZE); @@ -73,8 +78,7 @@ public void doImport(ActionListener finalListener) { } downloadParts( - new ModelLoaderUtils.InputStreamChunker(modelInputStream, DEFAULT_CHUNK_SIZE), - totalParts, + new ModelLoaderUtils.InputStreamChunker(modelInputStream, DEFAULT_CHUNK_SIZE, totalParts), size, vocabularyParts, finalListener @@ -83,46 +87,87 @@ public void doImport(ActionListener finalListener) { void downloadParts( ModelLoaderUtils.InputStreamChunker chunkIterator, - int totalParts, long size, @Nullable ModelLoaderUtils.VocabularyParts vocabularyParts, ActionListener finalListener ) { - var requestLimiter = new Semaphore(MAX_IN_FLIGHT_REQUESTS); - - try (var countingListener = new RefCountingListener(1, finalListener.map(ignored -> { - checkDownloadComplete(chunkIterator, totalParts); + var countingListener = new RefCountingListener(1, finalListener.map(ignored -> { + checkDownloadComplete(chunkIterator); return AcknowledgedResponse.TRUE; - }))) { - try { - // Uploading other artefacts of the model first, that way the model is last and a simple search can be used to check if the - // download is complete - if (vocabularyParts != null) { - requestLimiter.acquire(); - uploadVocabulary(vocabularyParts, countingListener.acquire(r -> { - requestLimiter.release(); - logger.debug(() -> format("[%s] imported model vocabulary [%s]", modelId, config.getVocabularyFile())); - })); + })); + try { + // Uploading other artefacts of the model first, that way the model is last and a simple search can be used to check if the + // download is complete + if (vocabularyParts != null) { + uploadVocabulary(vocabularyParts, countingListener.acquire(r -> { + logger.debug(() -> format("[%s] imported model vocabulary [%s]", modelId, config.getVocabularyFile())); + })); + } + + for (int part = 0; part < MAX_IN_FLIGHT_REQUESTS; ++part) { + if (countingListener.isFailing()) { + break; } - for (int part = 0; part < totalParts; ++part) { - if (countingListener.isFailing()) { - break; - } + task.setProgress(chunkIterator.getTotalParts(), chunkIterator.getCurrentPart().get()); + BytesArray definition = chunkIterator.next(); - task.setProgress(totalParts, part); - BytesArray definition = chunkIterator.next(); + if (task.isCancelled()) { + throw new TaskCancelledException(format("task cancelled with reason [%s]", task.getReasonCancelled())); + } - if (task.isCancelled()) { - throw new TaskCancelledException(format("task cancelled with reason [%s]", task.getReasonCancelled())); - } + uploadPart( + part, + chunkIterator.getTotalParts(), + size, + definition, + countingListener.acquire(r -> executorService.execute(() -> doNextPart(size, chunkIterator, countingListener))) + ); + } + } catch (Exception e) { + 
countingListener.acquire().onFailure(e); + countingListener.close(); + } + } - requestLimiter.acquire(); - uploadPart(part, totalParts, size, definition, countingListener.acquire(r -> requestLimiter.release())); - } - } catch (Exception e) { - countingListener.acquire().onFailure(e); + public void doNextPart(long size, ModelLoaderUtils.InputStreamChunker chunkIterator, RefCountingListener countingListener) { + if (countingListener.isFailing()) { + countingListener.close(); + return; + } + + task.setProgress(chunkIterator.getTotalParts(), chunkIterator.getCurrentPart().get()); + try { + logger.info("doing next part " + chunkIterator.getCurrentPart().get() + ", " + chunkIterator.getTotalParts()); + BytesArray definition = chunkIterator.next(); + + if (task.isCancelled()) { + throw new TaskCancelledException(format("task cancelled with reason [%s]", task.getReasonCancelled())); } + + if (definition.length() == 0) { + // done + return; + } + + boolean lastPart = chunkIterator.isFinalPart(); + + uploadPart( + chunkIterator.getCurrentPart().get(), + chunkIterator.getTotalParts(), + size, + definition, + countingListener.acquire(r -> { + if (lastPart) { + countingListener.close(); + } else { + executorService.execute(() -> doNextPart(size, chunkIterator, countingListener)); + } + }) + ); + } catch (Exception e) { + countingListener.acquire().onFailure(e); + countingListener.close(); } } @@ -157,7 +202,7 @@ private void uploadPart( client.execute(PutTrainedModelDefinitionPartAction.INSTANCE, modelPartRequest, listener); } - private void checkDownloadComplete(ModelLoaderUtils.InputStreamChunker chunkIterator, int totalParts) { + private void checkDownloadComplete(ModelLoaderUtils.InputStreamChunker chunkIterator) { if (config.getSha256().equals(chunkIterator.getSha256()) == false) { String message = format( "Model sha256 checksums do not match, expected [%s] but got [%s]", @@ -178,6 +223,6 @@ private void checkDownloadComplete(ModelLoaderUtils.InputStreamChunker chunkIter throw new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR); } - logger.debug(format("finished importing model [%s] using [%d] parts", modelId, totalParts)); + logger.debug(format("finished importing model [%s] using [%d] parts", modelId, chunkIterator.getTotalParts())); } } diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java index bf17a19d9daef..af55e03c2e024 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java @@ -38,6 +38,8 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Collectors; import static java.net.HttpURLConnection.HTTP_MOVED_PERM; @@ -66,15 +68,19 @@ static class InputStreamChunker { private final InputStream inputStream; private final MessageDigest digestSha256 = MessageDigests.sha256(); private final int chunkSize; + private final int totalParts; + private final AtomicLong totalBytesRead = new AtomicLong(); + private final AtomicInteger currentPart = new AtomicInteger(-1); - private long totalBytesRead = 0; - - InputStreamChunker(InputStream inputStream, int 
chunkSize) { + InputStreamChunker(InputStream inputStream, int chunkSize, int totalParts) { this.inputStream = inputStream; this.chunkSize = chunkSize; + this.totalParts = totalParts; } public BytesArray next() throws IOException { + currentPart.incrementAndGet(); + int bytesRead = 0; byte[] buf = new byte[chunkSize]; @@ -87,17 +93,29 @@ public BytesArray next() throws IOException { bytesRead += read; } digestSha256.update(buf, 0, bytesRead); - totalBytesRead += bytesRead; + totalBytesRead.addAndGet(bytesRead); return new BytesArray(buf, 0, bytesRead); } + public boolean isFinalPart() { + return currentPart.get() == totalParts - 1; + } + public String getSha256() { return MessageDigests.toHexString(digestSha256.digest()); } public long getTotalBytesRead() { - return totalBytesRead; + return totalBytesRead.get(); + } + + public int getTotalParts() { + return totalParts; + } + + public AtomicInteger getCurrentPart() { + return currentPart; } } diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtilsTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtilsTests.java index 661cd12f99957..1979ed7ca5ab4 100644 --- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtilsTests.java +++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtilsTests.java @@ -80,14 +80,14 @@ public void testSha256AndSize() throws IOException { assertEquals(64, expectedDigest.length()); int chunkSize = randomIntBetween(100, 10_000); + int totalParts = (bytes.length + chunkSize - 1) / chunkSize; ModelLoaderUtils.InputStreamChunker inputStreamChunker = new ModelLoaderUtils.InputStreamChunker( new ByteArrayInputStream(bytes), - chunkSize + chunkSize, + totalParts ); - int totalParts = (bytes.length + chunkSize - 1) / chunkSize; - for (int part = 0; part < totalParts - 1; ++part) { assertEquals(chunkSize, inputStreamChunker.next().length()); } From fcc66b441711e59a5dde720a16c543207ab21212 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 19 Aug 2024 17:37:46 +0100 Subject: [PATCH 10/58] tidy up --- .../packageloader/action/ModelImporter.java | 51 ++++++++----------- 1 file changed, 20 insertions(+), 31 deletions(-) diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java index 83ec077ac22b2..a5c0b787470f6 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java @@ -95,42 +95,31 @@ void downloadParts( checkDownloadComplete(chunkIterator); return AcknowledgedResponse.TRUE; })); - try { - // Uploading other artefacts of the model first, that way the model is last and a simple search can be used to check if the - // download is complete - if (vocabularyParts != null) { - uploadVocabulary(vocabularyParts, countingListener.acquire(r -> { - logger.debug(() -> format("[%s] imported model vocabulary [%s]", modelId, config.getVocabularyFile())); - })); - } - - for (int part = 0; part < MAX_IN_FLIGHT_REQUESTS; ++part) { - if (countingListener.isFailing()) { - break; - } - - task.setProgress(chunkIterator.getTotalParts(), 
chunkIterator.getCurrentPart().get()); - BytesArray definition = chunkIterator.next(); - - if (task.isCancelled()) { - throw new TaskCancelledException(format("task cancelled with reason [%s]", task.getReasonCancelled())); - } + // Uploading other artefacts of the model first, that way the model is last and a simple search can be used to check if the + // download is complete + if (vocabularyParts != null) { + uploadVocabulary( + vocabularyParts, + countingListener.acquire( + r -> { logger.debug(() -> format("[%s] imported model vocabulary [%s]", modelId, config.getVocabularyFile())); } + ) + ); + } - uploadPart( - part, - chunkIterator.getTotalParts(), - size, - definition, - countingListener.acquire(r -> executorService.execute(() -> doNextPart(size, chunkIterator, countingListener))) - ); - } - } catch (Exception e) { - countingListener.acquire().onFailure(e); - countingListener.close(); + for (int part = 0; part < MAX_IN_FLIGHT_REQUESTS; ++part) { + doNextPart(size, chunkIterator, countingListener); } + } public void doNextPart(long size, ModelLoaderUtils.InputStreamChunker chunkIterator, RefCountingListener countingListener) { + assert ThreadPool.assertCurrentThreadPool(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME) + : format( + "Model download must execute from [%s] but thread is [%s]", + MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME, + Thread.currentThread().getName() + ); + if (countingListener.isFailing()) { countingListener.close(); return; From 19218128509cb588ca1486160a9f09dab0ec9283 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Wed, 21 Aug 2024 19:53:41 +0100 Subject: [PATCH 11/58] more tests --- .../MachineLearningPackageLoader.java | 9 +- .../packageloader/action/ModelImporter.java | 90 +++++----- .../action/ModelLoaderUtils.java | 24 +-- .../TransportLoadTrainedModelPackage.java | 4 +- .../MachineLearningPackageLoaderTests.java | 12 ++ .../action/ModelDownloadTaskTests.java | 20 ++- .../action/ModelImporterTests.java | 163 ++++++++++-------- 7 files changed, 188 insertions(+), 134 deletions(-) diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java index 8f28721286ce4..d76c78f667d93 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java @@ -89,13 +89,14 @@ public List> getExecutorBuilders(Settings settings) { return List.of(modelDownloadExecutor(settings)); } - public static ExecutorBuilder modelDownloadExecutor(Settings settings) { - // Threadpool with a fixed size of 1 thread + public static FixedExecutorBuilder modelDownloadExecutor(Settings settings) { + // Threadpool with a fixed size of 1 thread for + // downloading the model definition files return new FixedExecutorBuilder( settings, MODEL_DOWNLOAD_THREADPOOL_NAME, - 1, - 10, + 1, // 1 thread + 10, // max items in queue "xpack.ml.model_download_thread_pool", EsExecutors.TaskTrackingConfig.DO_NOT_TRACK ); diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java index a5c0b787470f6..bf00f14d4dc85 100644 --- 
a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
@@ -28,11 +28,21 @@
 import java.net.URI;
 import java.util.Objects;
 import java.util.concurrent.ExecutorService;
+import java.util.concurrent.atomic.AtomicBoolean;

 import static org.elasticsearch.core.Strings.format;

 /**
- * A helper class for abstracting out the use of the ModelLoaderUtils to make dependency injection testing easier.
+ * For downloading the model vocabulary and definition file and
+ * indexing those files in Elasticsearch.
+ * Holding the large model definition file in memory would consume
+ * too much memory, so instead it is streamed in chunks and each
+ * chunk is written to the index in a non-blocking request. The
+ * number of in-flight index requests is limited to
+ * {@link #MAX_IN_FLIGHT_REQUESTS}, also to prevent too much memory
+ * being used.
+ * Only one thread can read the model definition stream at a time;
+ * this is ensured by using a fixed size threadpool with a single
+ * thread.
  */
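The javadoc above describes the core scheduling trick of this change: a single-threaded executor serialises every read of the model stream, while each completed index request re-enqueues the next read, keeping up to MAX_IN_FLIGHT_REQUESTS chunks in flight. Reduced to its essentials the pipeline looks roughly like this (a sketch with stand-in names and a toy "index" step, not the class below):

    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.atomic.AtomicInteger;

    class ChunkPipelineSketch {
        static final int MAX_IN_FLIGHT = 5;
        static final int TOTAL_CHUNKS = 20;

        final ExecutorService reader = Executors.newSingleThreadExecutor();
        final AtomicInteger nextChunk = new AtomicInteger();

        void start() {
            // seed the pipeline, mirroring the loop over MAX_IN_FLIGHT_REQUESTS
            for (int i = 0; i < MAX_IN_FLIGHT; i++) {
                reader.execute(this::readAndIndex);
            }
        }

        void readAndIndex() {
            // only ever runs on the single reader thread, so the
            // chunk source is never read concurrently
            int chunk = nextChunk.getAndIncrement();
            if (chunk >= TOTAL_CHUNKS) {
                return; // stream exhausted
            }
            // the "index request" runs elsewhere; its completion callback
            // schedules the next read, keeping the pipeline full
            CompletableFuture.runAsync(() -> index(chunk))
                .whenComplete((r, e) -> reader.execute(this::readAndIndex));
        }

        void index(int chunk) {
            System.out.println("indexing chunk " + chunk);
        }

        public static void main(String[] args) throws InterruptedException {
            new ChunkPipelineSketch().start();
            Thread.sleep(500); // crude wait so the demo can drain
            System.exit(0);
        }
    }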
 class ModelImporter {
     private static final int DEFAULT_CHUNK_SIZE = 1024 * 1024; // 1MB
@@ -43,6 +53,7 @@ class ModelImporter {
     private final ModelPackageConfig config;
     private final ModelDownloadTask task;
     private final ExecutorService executorService;
+    private final AtomicBoolean listenerIsClosed = new AtomicBoolean(false);

     ModelImporter(Client client, String modelId, ModelPackageConfig packageConfig, ModelDownloadTask task, ThreadPool threadPool) {
         this.client = client;
@@ -52,7 +63,17 @@ class ModelImporter {
         this.executorService = threadPool.executor(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME);
     }

-    public void doImport(ActionListener<AcknowledgedResponse> finalListener) {
+    public void doImport(ActionListener<AcknowledgedResponse> listener) {
+        executorService.execute(() -> doImportInternal(listener));
+    }
+
+    private void doImportInternal(ActionListener<AcknowledgedResponse> finalListener) {
+        assert ThreadPool.assertCurrentThreadPool(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME)
+            : format(
+                "Model download must execute from [%s] but thread is [%s]",
+                MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME,
+                Thread.currentThread().getName()
+            );

         long size = config.getSize();
         // simple round up
         int totalParts = (int) ((size + DEFAULT_CHUNK_SIZE - 1) / DEFAULT_CHUNK_SIZE);
@@ -98,36 +119,33 @@ void downloadParts(
         // Uploading other artefacts of the model first, that way the model is last and a simple search can be used to check if the
         // download is complete
         if (vocabularyParts != null) {
-            uploadVocabulary(
-                vocabularyParts,
-                countingListener.acquire(
-                    r -> { logger.debug(() -> format("[%s] imported model vocabulary [%s]", modelId, config.getVocabularyFile())); }
-                )
-            );
+            uploadVocabulary(vocabularyParts, countingListener.acquire(r -> {
+                logger.debug(() -> format("[%s] imported model vocabulary [%s]", modelId, config.getVocabularyFile()));
+            }));
         }

         for (int part = 0; part < MAX_IN_FLIGHT_REQUESTS; ++part) {
             doNextPart(size, chunkIterator, countingListener);
         }
+    }

-    public void doNextPart(long size, ModelLoaderUtils.InputStreamChunker chunkIterator, RefCountingListener countingListener) {
+    private void doNextPart(long size, ModelLoaderUtils.InputStreamChunker chunkIterator, RefCountingListener countingListener) {
         assert ThreadPool.assertCurrentThreadPool(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME)
             : format(
-            "Model download must execute from [%s] but thread is [%s]",
-            MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME,
-            Thread.currentThread().getName()
-        );
+                "Model download must execute from [%s] but thread is [%s]",
+                MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME,
+                Thread.currentThread().getName()
+            );

         if (countingListener.isFailing()) {
-            countingListener.close();
+            if (listenerIsClosed.compareAndSet(false, true)) {
+                countingListener.close();
+            }
             return;
         }

         task.setProgress(chunkIterator.getTotalParts(), chunkIterator.getCurrentPart().get());
         try {
-            logger.info("doing next part " + chunkIterator.getCurrentPart().get() + ", " + chunkIterator.getTotalParts());
             BytesArray definition = chunkIterator.next();

             if (task.isCancelled()) {
@@ -135,28 +153,26 @@ public void doNextPart(long size, ModelLoaderUtils.InputStreamChunker chunkItera
             }

             if (definition.length() == 0) {
-                // done
+                // download complete
+                if (listenerIsClosed.compareAndSet(false, true)) {
+                    countingListener.close();
+                }
                 return;
             }

-            boolean lastPart = chunkIterator.isFinalPart();
-
-            uploadPart(
-                chunkIterator.getCurrentPart().get(),
-                chunkIterator.getTotalParts(),
-                size,
-                definition,
-                countingListener.acquire(r -> {
-                    if (lastPart) {
-                        countingListener.close();
-                    } else {
-                        executorService.execute(() -> doNextPart(size, chunkIterator, countingListener));
-                    }
-                })
-            );
+            // Index the downloaded chunk and schedule the next download once
+            // the chunk is written.
+            // The key thing here is that the threadpool only has a single
+            // thread preventing concurrent access to the model stream while
+            // allowing multiple index requests to be in flight.
+            indexPart(chunkIterator.getCurrentPart().get(), chunkIterator.getTotalParts(), size, definition, countingListener.acquire(r -> {
+                executorService.execute(() -> doNextPart(size, chunkIterator, countingListener));
+            }));
         } catch (Exception e) {
             countingListener.acquire().onFailure(e);
-            countingListener.close();
+            if (listenerIsClosed.compareAndSet(false, true)) {
+                countingListener.close();
+            }
         }
     }

@@ -172,13 +188,7 @@ private void uploadVocabulary(ModelLoaderUtils.VocabularyParts vocabularyParts,
         client.execute(PutTrainedModelVocabularyAction.INSTANCE, request, listener);
     }

-    private void uploadPart(
-        int partIndex,
-        int totalParts,
-        long totalSize,
-        BytesArray bytes,
-        ActionListener<AcknowledgedResponse> listener
-    ) {
+    private void indexPart(int partIndex, int totalParts, long totalSize, BytesArray bytes, ActionListener<AcknowledgedResponse> listener) {
         PutTrainedModelDefinitionPartAction.Request modelPartRequest = new PutTrainedModelDefinitionPartAction.Request(
             modelId,
             bytes,
diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java
index af55e03c2e024..9d1f6863095bf 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java
@@ -70,7 +70,7 @@ static class InputStreamChunker {
         private final int chunkSize;
         private final int totalParts;
         private final AtomicLong totalBytesRead = new AtomicLong();
-        private final AtomicInteger currentPart = new AtomicInteger(-1);
+        private final AtomicInteger currentPart = new AtomicInteger();

         InputStreamChunker(InputStream inputStream, int chunkSize, int totalParts) {
             this.inputStream = inputStream;
@@ -78,8 +78,11 @@ static class InputStreamChunker {
             this.totalParts = totalParts;
         }

+        public boolean hasNext() 
{ + return false; + } + public BytesArray next() throws IOException { - currentPart.incrementAndGet(); int bytesRead = 0; byte[] buf = new byte[chunkSize]; @@ -92,14 +95,15 @@ public BytesArray next() throws IOException { } bytesRead += read; } - digestSha256.update(buf, 0, bytesRead); - totalBytesRead.addAndGet(bytesRead); - return new BytesArray(buf, 0, bytesRead); - } - - public boolean isFinalPart() { - return currentPart.get() == totalParts - 1; + if (bytesRead > 0) { + digestSha256.update(buf, 0, bytesRead); + totalBytesRead.addAndGet(bytesRead); + currentPart.incrementAndGet(); + return new BytesArray(buf, 0, bytesRead); + } else { + return BytesArray.EMPTY; + } } public String getSha256() { @@ -192,7 +196,7 @@ private ModelLoaderUtils() {} @SuppressWarnings("'java.lang.SecurityManager' is deprecated and marked for removal ") @SuppressForbidden(reason = "we need socket connection to download") - private static InputStream getHttpOrHttpsInputStream(URI uri) throws IOException { + private static InputStream getHttpOrHttpsInputStream(URI uri) { assert uri.getUserInfo() == null : "URI's with credentials are not supported"; diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java index 2c05bd5a7c1bc..8ca029d01d3c0 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackage.java @@ -37,7 +37,6 @@ import org.elasticsearch.xpack.core.ml.action.NodeAcknowledgedResponse; import org.elasticsearch.xpack.core.ml.packageloader.action.LoadTrainedModelPackageAction; import org.elasticsearch.xpack.core.ml.packageloader.action.LoadTrainedModelPackageAction.Request; -import org.elasticsearch.xpack.ml.packageloader.MachineLearningPackageLoader; import java.io.IOException; import java.net.MalformedURLException; @@ -103,8 +102,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, A var downloadCompleteListener = request.isWaitForCompletion() ? 
listener : ActionListener.noop();

-        threadPool.executor(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME)
-            .execute(() -> importModel(client, taskManager, request, modelImporter, downloadCompleteListener, downloadTask));
+        importModel(client, taskManager, request, modelImporter, downloadCompleteListener, downloadTask);
     } catch (Exception e) {
         taskManager.unregister(downloadTask);
         listener.onFailure(e);
diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoaderTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoaderTests.java
index 967d1b4ba4b6a..eb9b212935f24 100644
--- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoaderTests.java
+++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoaderTests.java
@@ -7,9 +7,13 @@

 package org.elasticsearch.xpack.ml.packageloader;

+import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.PathUtils;
 import org.elasticsearch.test.ESTestCase;

+import java.util.List;
+
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.oneOf;

@@ -80,4 +84,12 @@ public void testValidateModelRepository() {

         assertEquals("xpack.ml.model_repository does not support authentication", e.getMessage());
     }
+
+    public void testThreadPoolHasSingleThread() {
+        var fixedThreadPool = MachineLearningPackageLoader.modelDownloadExecutor(Settings.EMPTY);
+        List<Setting<?>> settings = fixedThreadPool.getRegisteredSettings();
+        var sizeSetting = settings.stream().filter(s -> s.getKey().startsWith("xpack.ml.model_download_thread_pool")).findFirst();
+        assertTrue(sizeSetting.isPresent());
+        assertEquals(1, sizeSetting.get().get(Settings.EMPTY));
+    }
 }
diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTaskTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTaskTests.java
index 0afd08c70cf45..3a682fb6a5094 100644
--- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTaskTests.java
+++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelDownloadTaskTests.java
@@ -20,14 +20,7 @@ public class ModelDownloadTaskTests extends ESTestCase {

     public void testStatus() {
-        var task = new ModelDownloadTask(
-            0L,
-            MODEL_IMPORT_TASK_TYPE,
-            MODEL_IMPORT_TASK_ACTION,
-            downloadModelTaskDescription("foo"),
-            TaskId.EMPTY_TASK_ID,
-            Map.of()
-        );
+        var task = testTask();

         task.setProgress(100, 0);
         var taskInfo = task.taskInfo("node", true);
@@ -39,4 +32,15 @@ public void testStatus() {
         status = Strings.toString(taskInfo.status());
         assertThat(status, containsString("{\"total_parts\":100,\"downloaded_parts\":1}"));
     }
+
+    public static ModelDownloadTask testTask() {
+        return new ModelDownloadTask(
+            0L,
+            MODEL_IMPORT_TASK_TYPE,
+            MODEL_IMPORT_TASK_ACTION,
+            downloadModelTaskDescription("foo"),
+            TaskId.EMPTY_TASK_ID,
+            Map.of()
+        );
+    }
 }
diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java
index 
29a3734c5a294..ab97c08cfe007 100644 --- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java +++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java @@ -9,9 +9,10 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; @@ -24,9 +25,11 @@ import org.junit.After; import org.junit.Before; +import java.io.ByteArrayInputStream; import java.io.IOException; -import java.util.ArrayList; import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.Matchers.containsString; @@ -44,126 +47,150 @@ public class ModelImporterTests extends ESTestCase { private TestThreadPool threadPool; @Before - public void setUp() { + public void createThreadPool() { threadPool = createThreadPool(MachineLearningPackageLoader.modelDownloadExecutor(Settings.EMPTY)); } @After - public void tearDown() { + public void closeThreadPool() { threadPool.close(); } - public void testDownload() throws IOException { + public void testDownload() throws InterruptedException { var client = mockClient(false); - var task = mock(ModelDownloadTask.class); + var task = ModelDownloadTaskTests.testTask(); var config = mock(ModelPackageConfig.class); var vocab = new ModelLoaderUtils.VocabularyParts(List.of(), List.of(), List.of()); var importer = new ModelImporter(client, "foo", config, task, threadPool); int totalParts = 5; - var modelDef = modelDefinition(totalParts); - long size = modelDef.stream().mapToInt(BytesArray::length).sum(); - var stream = mockStreamChunker(modelDef); + int chunkSize = 10; + long size = totalParts * chunkSize; + var modelDef = modelDefinition(totalParts, chunkSize); + var stream = mockStreamChunker(modelDef, totalParts, chunkSize); - var digest = computeDigest(modelDef); - when(stream.getSha256()).thenReturn(digest); + var digest = computeDigest(modelDef, totalParts, chunkSize); when(config.getSha256()).thenReturn(digest); when(config.getSize()).thenReturn(size); - when(stream.getTotalBytesRead()).thenReturn(size); - importer.downloadParts(stream, size, vocab, ActionListener.wrap(r -> { ; }, ESTestCase::fail)); + var latch = new CountDownLatch(1); + var latchedListener = new LatchedActionListener(ActionTestUtils.assertNoFailureListener(ignore -> {}), latch); + importer.downloadParts(stream, size, vocab, latchedListener); + latch.await(); verify(client, times(totalParts)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any()); + assertEquals(totalParts, task.getStatus().downloadProgress().downloadedParts()); + assertEquals(totalParts, task.getStatus().downloadProgress().totalParts()); } - public void testSizeMismatch() throws IOException { + public void testSizeMismatch() throws InterruptedException { var client = mockClient(false); var task = mock(ModelDownloadTask.class); var config = mock(ModelPackageConfig.class); var importer = new ModelImporter(client, 
"foo", config, task, threadPool); int totalParts = 5; - var modelDef = modelDefinition(totalParts); - long size = modelDef.stream().mapToInt(BytesArray::length).sum(); - var stream = mockStreamChunker(modelDef); + int chunkSize = 10; + long size = totalParts * chunkSize; + var modelDef = modelDefinition(totalParts, chunkSize); + var stream = mockStreamChunker(modelDef, totalParts, chunkSize); - var digest = computeDigest(modelDef); - when(stream.getSha256()).thenReturn(digest); + var digest = computeDigest(modelDef, totalParts, chunkSize); when(config.getSha256()).thenReturn(digest); - when(config.getSize()).thenReturn(size); - when(stream.getTotalBytesRead()).thenReturn(size - 1); // expected size and read size are different + when(config.getSize()).thenReturn(size - 1); // expected size and read size are different var exceptionHolder = new AtomicReference(); - importer.downloadParts(stream, size, null, ActionListener.wrap(ignore -> {}, exceptionHolder::set)); + var latch = new CountDownLatch(1); + var latchedListener = new LatchedActionListener( + ActionTestUtils.assertNoSuccessListener(exceptionHolder::set), + latch + ); + importer.downloadParts(stream, size, null, latchedListener); + latch.await(); assertThat(exceptionHolder.get().getMessage(), containsString("Model size does not match")); verify(client, times(totalParts)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any()); } - public void testDigestMismatch() throws IOException { + public void testDigestMismatch() throws InterruptedException { var client = mockClient(false); var task = mock(ModelDownloadTask.class); var config = mock(ModelPackageConfig.class); var importer = new ModelImporter(client, "foo", config, task, threadPool); int totalParts = 5; - var modelDef = modelDefinition(totalParts); - long size = modelDef.stream().mapToInt(BytesArray::length).sum(); - var stream = mockStreamChunker(modelDef); + int chunkSize = 10; + long size = totalParts * chunkSize; + var modelDef = modelDefinition(totalParts, chunkSize); + var stream = mockStreamChunker(modelDef, totalParts, chunkSize); - var digest = computeDigest(modelDef); - when(stream.getSha256()).thenReturn(digest); when(config.getSha256()).thenReturn("0x"); // digest is different when(config.getSize()).thenReturn(size); - when(stream.getTotalBytesRead()).thenReturn(size); var exceptionHolder = new AtomicReference(); - - importer.downloadParts(stream, size, null, ActionListener.wrap(ignore -> {}, exceptionHolder::set)); - + var latch = new CountDownLatch(1); + var latchedListener = new LatchedActionListener( + ActionTestUtils.assertNoSuccessListener(exceptionHolder::set), + latch + ); + importer.downloadParts(stream, size, null, latchedListener); + + latch.await(); assertThat(exceptionHolder.get().getMessage(), containsString("Model sha256 checksums do not match")); verify(client, times(totalParts)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any()); } - public void testPutFailure() throws IOException { - var client = mockClient(true); + public void testPutFailure() throws InterruptedException { + var client = mockClient(true); // client will fail put var task = mock(ModelDownloadTask.class); var config = mock(ModelPackageConfig.class); var importer = new ModelImporter(client, "foo", config, task, threadPool); int totalParts = 4; - var modelDef = modelDefinition(totalParts); - int size = modelDef.stream().mapToInt(BytesArray::length).sum(); - var stream = mockStreamChunker(modelDef); + int chunkSize = 10; + var modelDef = 
modelDefinition(totalParts, chunkSize); + var stream = mockStreamChunker(modelDef, totalParts, chunkSize); var exceptionHolder = new AtomicReference(); - - importer.downloadParts(stream, totalParts, null, ActionListener.wrap(r -> fail("unexpected response"), exceptionHolder::set)); - + var latch = new CountDownLatch(1); + var latchedListener = new LatchedActionListener( + ActionTestUtils.assertNoSuccessListener(exceptionHolder::set), + latch + ); + importer.downloadParts(stream, totalParts * chunkSize, null, latchedListener); + + latch.await(); assertThat(exceptionHolder.get().getMessage(), containsString("put model part failed")); verify(client, times(1)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any()); } - public void testReadFailure() throws IOException { + public void testReadFailure() throws IOException, InterruptedException { var client = mockClient(true); var task = mock(ModelDownloadTask.class); var config = mock(ModelPackageConfig.class); var importer = new ModelImporter(client, "foo", config, task, threadPool); - int totalParts = 4; var stream = mock(ModelLoaderUtils.InputStreamChunker.class); - when(stream.next()).thenThrow(new IOException("stream failed")); + when(stream.next()).thenThrow(new IOException("stream failed")); // fail the read - var exceptionHolder = new AtomicReference(); - - importer.downloadParts(stream, 1L, null, ActionListener.wrap(r -> fail("unexpected response"), exceptionHolder::set)); + when(stream.getTotalParts()).thenReturn(10); + when(stream.getCurrentPart()).thenReturn(new AtomicInteger()); + var exceptionHolder = new AtomicReference(); + var latch = new CountDownLatch(1); + var latchedListener = new LatchedActionListener( + ActionTestUtils.assertNoSuccessListener(exceptionHolder::set), + latch + ); + importer.downloadParts(stream, 1L, null, latchedListener); + + latch.await(); assertThat(exceptionHolder.get().getMessage(), containsString("stream failed")); } @SuppressWarnings("unchecked") - public void testUploadVocabFailure() { + public void testUploadVocabFailure() throws InterruptedException { var client = mock(Client.class); doAnswer(invocation -> { ActionListener listener = (ActionListener) invocation.getArguments()[2]; @@ -179,40 +206,38 @@ public void testUploadVocabFailure() { var importer = new ModelImporter(client, "foo", config, task, threadPool); var stream = mock(ModelLoaderUtils.InputStreamChunker.class); var exceptionHolder = new AtomicReference(); - - importer.downloadParts(stream, 1L, vocab, ActionListener.wrap(r -> fail("put vocab failed"), exceptionHolder::set)); - + var latch = new CountDownLatch(1); + var latchedListener = new LatchedActionListener( + ActionTestUtils.assertNoSuccessListener(exceptionHolder::set), + latch + ); + importer.downloadParts(stream, 1L, vocab, latchedListener); + + latch.await(); assertThat(exceptionHolder.get().getMessage(), containsString("put vocab failed")); verify(client, times(1)).execute(eq(PutTrainedModelVocabularyAction.INSTANCE), any(), any()); verify(client, never()).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any()); } - private ModelLoaderUtils.InputStreamChunker mockStreamChunker(List modelDef) throws IOException { - var streamChunker = mock(ModelLoaderUtils.InputStreamChunker.class); - - var first = modelDef.get(0); - var others = new BytesArray[modelDef.size() - 1]; - for (int i = 0; i < modelDef.size() - 1; i++) { - others[i] = modelDef.get(i + 1); - } - - when(streamChunker.next()).thenReturn(first, others); - return streamChunker; + private 
ModelLoaderUtils.InputStreamChunker mockStreamChunker(byte[] modelDef, int totalPart, int chunkSize) {
+        var modelDefStream = new ByteArrayInputStream(modelDef);
+        return new ModelLoaderUtils.InputStreamChunker(modelDefStream, chunkSize, totalPart);
     }

-    private List<BytesArray> modelDefinition(int totalParts) {
-        var parts = new ArrayList<BytesArray>();
+    private byte[] modelDefinition(int totalParts, int chunkSize) {
+        var bytes = new byte[totalParts * chunkSize];
         for (int i = 0; i < totalParts; i++) {
-            parts.add(new BytesArray(randomByteArrayOfLength(4)));
+            System.arraycopy(randomByteArrayOfLength(chunkSize), 0, bytes, i * chunkSize, chunkSize);
         }
-        return parts;
+        return bytes;
     }

-    private String computeDigest(List<BytesArray> parts) {
+    private String computeDigest(byte[] modelDef, int totalParts, int chunkSize) {
         var digest = MessageDigests.sha256();
-        for (var part : parts) {
-            digest.update(part.array());
-        }
+        digest.update(modelDef);
+        // for (int i = 0; i < totalParts; i++) {
+        //     digest.update(modelDef, i * chunkSize, chunkSize);
+        // }
+        return MessageDigests.toHexString(digest.digest());
+    }

From: David Kyle
Date: Thu, 22 Aug 2024 10:15:08 +0100
Subject: [PATCH 12/58] remove unused

---
 .../xpack/ml/packageloader/MachineLearningPackageLoader.java | 2 +-
 .../xpack/ml/packageloader/action/ModelLoaderUtils.java      | 4 ----
 2 files changed, 1 insertion(+), 5 deletions(-)

diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java
index d76c78f667d93..c502a6840c0db 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java
@@ -96,7 +96,7 @@ public static FixedExecutorBuilder modelDownloadExecutor(Settings settings) {
             settings,
             MODEL_DOWNLOAD_THREADPOOL_NAME,
             1, // 1 thread
-            10, // max items in queue
+            50, // max items in queue
             "xpack.ml.model_download_thread_pool",
             EsExecutors.TaskTrackingConfig.DO_NOT_TRACK
         );
diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java
index 9d1f6863095bf..0ee1affebcb80 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java
@@ -78,10 +78,6 @@ static class InputStreamChunker {
         this.totalParts = totalParts;
     }

-    public boolean hasNext() {
-        return false;
-    }
-
     public BytesArray next() throws IOException {

         int bytesRead = 0;

From c4ea1467990f1969ac34296618e5258e6e662e8c Mon Sep 17 00:00:00 2001
From: David Kyle
Date: Thu, 22 Aug 2024 12:11:43 +0100
Subject: [PATCH 13/58] fix the tests

---
 .../xpack/ml/packageloader/action/ModelImporter.java      | 2 +-
 .../xpack/ml/packageloader/action/ModelLoaderUtils.java   | 2 +-
 .../xpack/ml/packageloader/action/ModelImporterTests.java | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
index bf00f14d4dc85..43fed15359a5d 100644
--- 
a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java @@ -144,7 +144,7 @@ private void doNextPart(long size, ModelLoaderUtils.InputStreamChunker chunkIter return; } - task.setProgress(chunkIterator.getTotalParts(), chunkIterator.getCurrentPart().get()); + task.setProgress(chunkIterator.getTotalParts(), Math.max(0, chunkIterator.getCurrentPart().get())); try { BytesArray definition = chunkIterator.next(); diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java index 0ee1affebcb80..4a0fcc5b3d59d 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java @@ -70,7 +70,7 @@ static class InputStreamChunker { private final int chunkSize; private final int totalParts; private final AtomicLong totalBytesRead = new AtomicLong(); - private final AtomicInteger currentPart = new AtomicInteger(); + private final AtomicInteger currentPart = new AtomicInteger(-1); InputStreamChunker(InputStream inputStream, int chunkSize, int totalParts) { this.inputStream = inputStream; diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java index ab97c08cfe007..59c6f0f63e04e 100644 --- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java +++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java @@ -79,7 +79,7 @@ public void testDownload() throws InterruptedException { latch.await(); verify(client, times(totalParts)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any()); - assertEquals(totalParts, task.getStatus().downloadProgress().downloadedParts()); + assertEquals(totalParts - 1, task.getStatus().downloadProgress().downloadedParts()); assertEquals(totalParts, task.getStatus().downloadProgress().totalParts()); } From 413a3ab143b064fed4a84b335b69d296261f2dc8 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Thu, 22 Aug 2024 13:04:29 +0100 Subject: [PATCH 14/58] 5 in flight requests --- .../xpack/ml/packageloader/action/ModelImporter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java index 43fed15359a5d..09f8c6be152f9 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java @@ -46,7 +46,7 @@ */ class ModelImporter { private static final int DEFAULT_CHUNK_SIZE = 1024 * 1024; // 1MB - private static final int MAX_IN_FLIGHT_REQUESTS = 3; + private static final int MAX_IN_FLIGHT_REQUESTS = 5; private static 
final Logger logger = LogManager.getLogger(ModelImporter.class); private final Client client; private final String modelId; From 70ffe07c60138543edef37f245bf5be33758a478 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 27 Aug 2024 14:50:47 +0100 Subject: [PATCH 15/58] use another threadpool for writes --- .../MachineLearningPackageLoader.java | 1 + .../ml/packageloader/action/ModelImporter.java | 14 ++++++++------ .../packageloader/action/ModelImporterTests.java | 10 ++++++++-- 3 files changed, 17 insertions(+), 8 deletions(-) diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java index c502a6840c0db..070ba927330cb 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java @@ -56,6 +56,7 @@ public class MachineLearningPackageLoader extends Plugin implements ActionPlugin ); public static final String MODEL_DOWNLOAD_THREADPOOL_NAME = "model_download"; + public static final String ML_UTILITY_THREADPOOL_NAME = "ml_utility"; public MachineLearningPackageLoader() {} diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java index 09f8c6be152f9..1f8804a781be7 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java @@ -46,13 +46,14 @@ */ class ModelImporter { private static final int DEFAULT_CHUNK_SIZE = 1024 * 1024; // 1MB - private static final int MAX_IN_FLIGHT_REQUESTS = 5; + private static final int MAX_IN_FLIGHT_REQUESTS = 3; private static final Logger logger = LogManager.getLogger(ModelImporter.class); private final Client client; private final String modelId; private final ModelPackageConfig config; private final ModelDownloadTask task; - private final ExecutorService executorService; + private final ExecutorService downloadExecutor; + private final ExecutorService writePartExecutor; private final AtomicBoolean listenerIsClosed = new AtomicBoolean(false); ModelImporter(Client client, String modelId, ModelPackageConfig packageConfig, ModelDownloadTask task, ThreadPool threadPool) { @@ -60,11 +61,12 @@ class ModelImporter { this.modelId = Objects.requireNonNull(modelId); this.config = Objects.requireNonNull(packageConfig); this.task = Objects.requireNonNull(task); - this.executorService = threadPool.executor(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME); + this.downloadExecutor = threadPool.executor(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME); + this.writePartExecutor = threadPool.executor(MachineLearningPackageLoader.ML_UTILITY_THREADPOOL_NAME); } public void doImport(ActionListener listener) { - executorService.execute(() -> doImportInternal(listener)); + downloadExecutor.execute(() -> doImportInternal(listener)); } private void doImportInternal(ActionListener finalListener) { @@ -166,7 +168,7 @@ private void doNextPart(long size, ModelLoaderUtils.InputStreamChunker chunkIter // thread 
preventing concurrent access to the model stream while // allowing multiple index requests to be in flight. indexPart(chunkIterator.getCurrentPart().get(), chunkIterator.getTotalParts(), size, definition, countingListener.acquire(r -> { - executorService.execute(() -> doNextPart(size, chunkIterator, countingListener)); + downloadExecutor.execute(() -> doNextPart(size, chunkIterator, countingListener)); })); } catch (Exception e) { countingListener.acquire().onFailure(e); @@ -198,7 +200,7 @@ private void indexPart(int partIndex, int totalParts, long totalSize, BytesArray true ); - client.execute(PutTrainedModelDefinitionPartAction.INSTANCE, modelPartRequest, listener); + writePartExecutor.execute(() -> client.execute(PutTrainedModelDefinitionPartAction.INSTANCE, modelPartRequest, listener)); } private void checkDownloadComplete(ModelLoaderUtils.InputStreamChunker chunkIterator) { diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java index 59c6f0f63e04e..370fcace786e6 100644 --- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java +++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java @@ -15,8 +15,10 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ScalingExecutorBuilder; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelDefinitionPartAction; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelVocabularyAction; @@ -35,6 +37,7 @@ import static org.hamcrest.Matchers.containsString; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.atLeastOnce; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -48,7 +51,10 @@ public class ModelImporterTests extends ESTestCase { @Before public void createThreadPool() { - threadPool = createThreadPool(MachineLearningPackageLoader.modelDownloadExecutor(Settings.EMPTY)); + threadPool = createThreadPool( + MachineLearningPackageLoader.modelDownloadExecutor(Settings.EMPTY), + new ScalingExecutorBuilder("ml_utility", 1, 4, TimeValue.timeValueMinutes(10), false, "xpack.ml.utility_thread_pool") + ); } @After @@ -162,7 +168,7 @@ public void testPutFailure() throws InterruptedException { latch.await(); assertThat(exceptionHolder.get().getMessage(), containsString("put model part failed")); - verify(client, times(1)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any()); + verify(client, atLeastOnce()).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any()); } public void testReadFailure() throws IOException, InterruptedException { From 573d5afc0e866d5609191ddca6c025ad6ab7a928 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Thu, 29 Aug 2024 09:08:15 +0100 Subject: [PATCH 16/58] Revert "use another threadpool for writes" This reverts commit 70ffe07c60138543edef37f245bf5be33758a478. 
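For context, both the reverted change and the code it restores rely on the same back-pressure idea: a bounded number of index requests in flight, driven from a single reader thread, where each completed write schedules the next chunk read on the download executor. Below is a minimal, self-contained sketch of that pattern using plain JDK types; ThrottledUploader, start and asyncWrite are invented names for illustration, not the Elasticsearch code.

    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.atomic.AtomicInteger;

    class ThrottledUploader {
        private final ExecutorService reader = Executors.newFixedThreadPool(1); // single reader thread
        private final AtomicInteger nextPart = new AtomicInteger();
        private final int totalParts;

        ThrottledUploader(int totalParts) {
            this.totalParts = totalParts;
        }

        // Prime the pipeline with maxInFlight chained tasks; from then on at
        // most maxInFlight writes are ever outstanding.
        void start(int maxInFlight) {
            for (int i = 0; i < maxInFlight; i++) {
                reader.execute(this::uploadNext);
            }
        }

        private void uploadNext() {
            int part = nextPart.getAndIncrement();
            if (part >= totalParts) {
                return; // all parts dispatched
            }
            // asyncWrite stands in for the non-blocking index request; its
            // completion re-schedules the next read on the reader thread.
            asyncWrite(part).thenRun(() -> reader.execute(this::uploadNext));
        }

        private CompletableFuture<Void> asyncWrite(int part) {
            return CompletableFuture.runAsync(() -> System.out.println("wrote part " + part));
        }

        public static void main(String[] args) throws InterruptedException {
            var uploader = new ThrottledUploader(10);
            uploader.start(3);
            Thread.sleep(500); // crude wait so the demo output completes
            uploader.reader.shutdown();
        }
    }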
--- .../MachineLearningPackageLoader.java | 1 - .../ml/packageloader/action/ModelImporter.java | 14 ++++++-------- .../packageloader/action/ModelImporterTests.java | 10 ++-------- 3 files changed, 8 insertions(+), 17 deletions(-) diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java index 070ba927330cb..c502a6840c0db 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java @@ -56,7 +56,6 @@ public class MachineLearningPackageLoader extends Plugin implements ActionPlugin ); public static final String MODEL_DOWNLOAD_THREADPOOL_NAME = "model_download"; - public static final String ML_UTILITY_THREADPOOL_NAME = "ml_utility"; public MachineLearningPackageLoader() {} diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java index 1f8804a781be7..09f8c6be152f9 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java @@ -46,14 +46,13 @@ */ class ModelImporter { private static final int DEFAULT_CHUNK_SIZE = 1024 * 1024; // 1MB - private static final int MAX_IN_FLIGHT_REQUESTS = 3; + private static final int MAX_IN_FLIGHT_REQUESTS = 5; private static final Logger logger = LogManager.getLogger(ModelImporter.class); private final Client client; private final String modelId; private final ModelPackageConfig config; private final ModelDownloadTask task; - private final ExecutorService downloadExecutor; - private final ExecutorService writePartExecutor; + private final ExecutorService executorService; private final AtomicBoolean listenerIsClosed = new AtomicBoolean(false); ModelImporter(Client client, String modelId, ModelPackageConfig packageConfig, ModelDownloadTask task, ThreadPool threadPool) { @@ -61,12 +60,11 @@ class ModelImporter { this.modelId = Objects.requireNonNull(modelId); this.config = Objects.requireNonNull(packageConfig); this.task = Objects.requireNonNull(task); - this.downloadExecutor = threadPool.executor(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME); - this.writePartExecutor = threadPool.executor(MachineLearningPackageLoader.ML_UTILITY_THREADPOOL_NAME); + this.executorService = threadPool.executor(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME); } public void doImport(ActionListener listener) { - downloadExecutor.execute(() -> doImportInternal(listener)); + executorService.execute(() -> doImportInternal(listener)); } private void doImportInternal(ActionListener finalListener) { @@ -168,7 +166,7 @@ private void doNextPart(long size, ModelLoaderUtils.InputStreamChunker chunkIter // thread preventing concurrent access to the model stream while // allowing multiple index requests to be in flight. 
indexPart(chunkIterator.getCurrentPart().get(), chunkIterator.getTotalParts(), size, definition, countingListener.acquire(r -> { - downloadExecutor.execute(() -> doNextPart(size, chunkIterator, countingListener)); + executorService.execute(() -> doNextPart(size, chunkIterator, countingListener)); })); } catch (Exception e) { countingListener.acquire().onFailure(e); @@ -200,7 +198,7 @@ private void indexPart(int partIndex, int totalParts, long totalSize, BytesArray true ); - writePartExecutor.execute(() -> client.execute(PutTrainedModelDefinitionPartAction.INSTANCE, modelPartRequest, listener)); + client.execute(PutTrainedModelDefinitionPartAction.INSTANCE, modelPartRequest, listener); } private void checkDownloadComplete(ModelLoaderUtils.InputStreamChunker chunkIterator) { diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java index 370fcace786e6..59c6f0f63e04e 100644 --- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java +++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java @@ -15,10 +15,8 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ScalingExecutorBuilder; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelDefinitionPartAction; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelVocabularyAction; @@ -37,7 +35,6 @@ import static org.hamcrest.Matchers.containsString; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.atLeastOnce; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -51,10 +48,7 @@ public class ModelImporterTests extends ESTestCase { @Before public void createThreadPool() { - threadPool = createThreadPool( - MachineLearningPackageLoader.modelDownloadExecutor(Settings.EMPTY), - new ScalingExecutorBuilder("ml_utility", 1, 4, TimeValue.timeValueMinutes(10), false, "xpack.ml.utility_thread_pool") - ); + threadPool = createThreadPool(MachineLearningPackageLoader.modelDownloadExecutor(Settings.EMPTY)); } @After @@ -168,7 +162,7 @@ public void testPutFailure() throws InterruptedException { latch.await(); assertThat(exceptionHolder.get().getMessage(), containsString("put model part failed")); - verify(client, atLeastOnce()).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any()); + verify(client, times(1)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any()); } public void testReadFailure() throws IOException, InterruptedException { From 658b9030cf4e45ecb2c0e8324e38014955c8bfc0 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 3 Sep 2024 14:31:08 +0100 Subject: [PATCH 17/58] use range request --- .../MachineLearningPackageLoader.java | 3 +- .../packageloader/action/ModelImporter.java | 186 +++++++++++------- .../action/ModelLoaderUtils.java | 159 +++++++++++++-- .../MachineLearningPackageLoaderTests.java | 2 +- 
.../action/ModelImporterTests.java | 19 +- .../action/ModelLoaderUtilsTests.java | 40 +++- 6 files changed, 299 insertions(+), 110 deletions(-) diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java index c502a6840c0db..a9c18d3358900 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java @@ -25,6 +25,7 @@ import org.elasticsearch.xpack.core.ml.packageloader.action.GetTrainedModelPackageConfigAction; import org.elasticsearch.xpack.core.ml.packageloader.action.LoadTrainedModelPackageAction; import org.elasticsearch.xpack.ml.packageloader.action.ModelDownloadTask; +import org.elasticsearch.xpack.ml.packageloader.action.ModelImporter; import org.elasticsearch.xpack.ml.packageloader.action.TransportGetTrainedModelPackageConfigAction; import org.elasticsearch.xpack.ml.packageloader.action.TransportLoadTrainedModelPackage; @@ -95,7 +96,7 @@ public static FixedExecutorBuilder modelDownloadExecutor(Settings settings) { return new FixedExecutorBuilder( settings, MODEL_DOWNLOAD_THREADPOOL_NAME, - 1, // 1 thread + ModelImporter.MAX_IN_FLIGHT_REQUESTS, 50, // max items in queue "xpack.ml.model_download_thread_pool", EsExecutors.TaskTrackingConfig.DO_NOT_TRACK diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java index 09f8c6be152f9..15566b6641b40 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java @@ -24,11 +24,14 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ModelPackageConfig; import org.elasticsearch.xpack.ml.packageloader.MachineLearningPackageLoader; -import java.io.InputStream; import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.List; import java.util.Objects; import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; import static org.elasticsearch.core.Strings.format; @@ -44,9 +47,9 @@ * this is ensured by using a fixed size threadpool with a single * thread. 
*/ -class ModelImporter { +public class ModelImporter { private static final int DEFAULT_CHUNK_SIZE = 1024 * 1024; // 1MB - private static final int MAX_IN_FLIGHT_REQUESTS = 5; + public static final int MAX_IN_FLIGHT_REQUESTS = 5; private static final Logger logger = LogManager.getLogger(ModelImporter.class); private final Client client; private final String modelId; @@ -54,16 +57,24 @@ class ModelImporter { private final ModelDownloadTask task; private final ExecutorService executorService; private final AtomicBoolean listenerIsClosed = new AtomicBoolean(false); + private final AtomicInteger progressCounter = new AtomicInteger(); + private final URI uri; - ModelImporter(Client client, String modelId, ModelPackageConfig packageConfig, ModelDownloadTask task, ThreadPool threadPool) { + ModelImporter(Client client, String modelId, ModelPackageConfig packageConfig, ModelDownloadTask task, ThreadPool threadPool) + throws URISyntaxException { this.client = client; this.modelId = Objects.requireNonNull(modelId); this.config = Objects.requireNonNull(packageConfig); this.task = Objects.requireNonNull(task); this.executorService = threadPool.executor(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME); + this.uri = ModelLoaderUtils.resolvePackageLocation( + config.getModelRepository(), + config.getPackagedModelId() + ModelLoaderUtils.MODEL_FILE_EXTENSION + ); } public void doImport(ActionListener listener) { + // todo file import executorService.execute(() -> doImportInternal(listener)); } @@ -75,19 +86,9 @@ private void doImportInternal(ActionListener finalListener Thread.currentThread().getName() ); - long size = config.getSize(); - // simple round up - int totalParts = (int) ((size + DEFAULT_CHUNK_SIZE - 1) / DEFAULT_CHUNK_SIZE); - InputStream modelInputStream; ModelLoaderUtils.VocabularyParts vocabularyParts = null; try { - URI uri = ModelLoaderUtils.resolvePackageLocation( - config.getModelRepository(), - config.getPackagedModelId() + ModelLoaderUtils.MODEL_FILE_EXTENSION - ); - modelInputStream = ModelLoaderUtils.getInputStreamFromModelRepository(uri); - if (config.getVocabularyFile() != null) { vocabularyParts = ModelLoaderUtils.loadVocabulary( ModelLoaderUtils.resolvePackageLocation(config.getModelRepository(), config.getVocabularyFile()) @@ -98,38 +99,53 @@ private void doImportInternal(ActionListener finalListener return; } - downloadParts( - new ModelLoaderUtils.InputStreamChunker(modelInputStream, DEFAULT_CHUNK_SIZE, totalParts), - size, - vocabularyParts, - finalListener - ); + downloadModelDefinition(config.getSize(), vocabularyParts, finalListener); } - void downloadParts( - ModelLoaderUtils.InputStreamChunker chunkIterator, + void downloadModelDefinition( long size, @Nullable ModelLoaderUtils.VocabularyParts vocabularyParts, ActionListener finalListener ) { - var countingListener = new RefCountingListener(1, finalListener.map(ignored -> { - checkDownloadComplete(chunkIterator); - return AcknowledgedResponse.TRUE; - })); - // Uploading other artefacts of the model first, that way the model is last and a simple search can be used to check if the - // download is complete - if (vocabularyParts != null) { - uploadVocabulary(vocabularyParts, countingListener.acquire(r -> { - logger.debug(() -> format("[%s] imported model vocabulary [%s]", modelId, config.getVocabularyFile())); - })); + // simple round up + int totalParts = (int) ((size + DEFAULT_CHUNK_SIZE - 1) / DEFAULT_CHUNK_SIZE); + var ranges = ModelLoaderUtils.split(size, MAX_IN_FLIGHT_REQUESTS, DEFAULT_CHUNK_SIZE); + + var 
downloaders = new ArrayList(); + for (var range : ranges) { + downloaders.add(new ModelLoaderUtils.HttStreamChunker(uri, range, DEFAULT_CHUNK_SIZE)); } - for (int part = 0; part < MAX_IN_FLIGHT_REQUESTS; ++part) { - doNextPart(size, chunkIterator, countingListener); + try (var countingListener = new RefCountingListener(1, ActionListener.wrap(ignore -> executorService.execute(() -> { + var finalDownloader = downloaders.get(downloaders.size() - 1); + downloadFinalPart(size, totalParts, finalDownloader, finalListener.delegateFailureAndWrap((l, r) -> { + checkDownloadComplete(downloaders); + l.onResponse(AcknowledgedResponse.TRUE); + })); + }), finalListener::onFailure))) { + // Uploading other artefacts of the model first, that way the model is last and a simple search can be used to check if the + // download is complete + if (vocabularyParts != null) { + uploadVocabulary(vocabularyParts, countingListener.acquire(r -> { + logger.info(() -> format("[%s] imported model vocabulary [%s]", modelId, config.getVocabularyFile())); + })); + } + + for (int streamSplit = 0; streamSplit < MAX_IN_FLIGHT_REQUESTS; ++streamSplit) { + final var downloader = downloaders.get(streamSplit); + var doLast = countingListener.acquire(); + executorService.execute(() -> downloadPartsInRange(size, totalParts, downloader, countingListener, doLast)); + } } } - private void doNextPart(long size, ModelLoaderUtils.InputStreamChunker chunkIterator, RefCountingListener countingListener) { + private void downloadPartsInRange( + long size, + int totalParts, + ModelLoaderUtils.HttStreamChunker downloadChunker, + RefCountingListener countingListener, + ActionListener doLast + ) { assert ThreadPool.assertCurrentThreadPool(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME) : format( "Model download must execute from [%s] but thread is [%s]", @@ -137,42 +153,61 @@ private void doNextPart(long size, ModelLoaderUtils.InputStreamChunker chunkIter Thread.currentThread().getName() ); - if (countingListener.isFailing()) { - if (listenerIsClosed.compareAndSet(false, true)) { - countingListener.close(); + while (downloadChunker.hasNext()) { + if (countingListener.isFailing()) { + if (listenerIsClosed.compareAndSet(false, true)) { + logger.info("is failing"); + countingListener.close(); + } + return; } - return; - } - - task.setProgress(chunkIterator.getTotalParts(), Math.max(0, chunkIterator.getCurrentPart().get())); - try { - BytesArray definition = chunkIterator.next(); if (task.isCancelled()) { + logger.info("task cancelled"); throw new TaskCancelledException(format("task cancelled with reason [%s]", task.getReasonCancelled())); } - if (definition.length() == 0) { - // download complete + try { + var bytesAndIndex = downloadChunker.next(); + task.setProgress(totalParts, progressCounter.getAndIncrement()); + logger.info("Progress " + progressCounter.get() + " , " + totalParts); + + indexPart(bytesAndIndex.partIndex(), totalParts, size, bytesAndIndex.bytes(), countingListener.acquire(ack -> {})); + } catch (Exception e) { + logger.info("errr", e); + countingListener.acquire().onFailure(e); if (listenerIsClosed.compareAndSet(false, true)) { countingListener.close(); } - return; } + } - // Index the downloaded chunk and schedule the next download once - // the chunk is written. - // The key thing here is that the threadpool only has a single - // thread preventing concurrent access to the model stream while - // allowing multiple index requests to be in flight. 
- indexPart(chunkIterator.getCurrentPart().get(), chunkIterator.getTotalParts(), size, definition, countingListener.acquire(r -> { - executorService.execute(() -> doNextPart(size, chunkIterator, countingListener)); - })); + logger.info("split complete " + downloadChunker.getCurrentPart()); + doLast.onResponse(null); + } + + private void downloadFinalPart( + long size, + int totalParts, + ModelLoaderUtils.HttStreamChunker downloader, + ActionListener lastPartWrittenListener + ) { + assert ThreadPool.assertCurrentThreadPool(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME) + : format( + "Model download must execute from [%s] but thread is [%s]", + MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME, + Thread.currentThread().getName() + ); + + logger.info("final part"); + + try { + var bytesAndIndex = downloader.next(); + task.setProgress(totalParts, progressCounter.getAndIncrement()); + + indexPart(bytesAndIndex.partIndex(), totalParts, size, bytesAndIndex.bytes(), lastPartWrittenListener); } catch (Exception e) { - countingListener.acquire().onFailure(e); - if (listenerIsClosed.compareAndSet(false, true)) { - countingListener.close(); - } + lastPartWrittenListener.onFailure(e); } } @@ -201,27 +236,26 @@ private void indexPart(int partIndex, int totalParts, long totalSize, BytesArray client.execute(PutTrainedModelDefinitionPartAction.INSTANCE, modelPartRequest, listener); } - private void checkDownloadComplete(ModelLoaderUtils.InputStreamChunker chunkIterator) { - if (config.getSha256().equals(chunkIterator.getSha256()) == false) { - String message = format( - "Model sha256 checksums do not match, expected [%s] but got [%s]", - config.getSha256(), - chunkIterator.getSha256() - ); - - throw new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR); - } + private void checkDownloadComplete(List downloaders) { + // if (config.getSha256().equals(chunkIterator.getSha256()) == false) { + // String message = format( + // "Model sha256 checksums do not match, expected [%s] but got [%s]", + // config.getSha256(), + // chunkIterator.getSha256() + // ); + // + // throw new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR); + // } - if (config.getSize() != chunkIterator.getTotalBytesRead()) { - String message = format( - "Model size does not match, expected [%d] but got [%d]", - config.getSize(), - chunkIterator.getTotalBytesRead() - ); + long readSize = downloaders.stream().mapToLong(ModelLoaderUtils.HttStreamChunker::getTotalBytesRead).sum(); + if (config.getSize() != readSize) { + String message = format("Model size does not match, expected [%d] but got [%d]", config.getSize(), readSize); throw new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR); } - logger.debug(format("finished importing model [%s] using [%d] parts", modelId, chunkIterator.getTotalParts())); + int totalParts = downloaders.stream().mapToInt(ModelLoaderUtils.HttStreamChunker::getCurrentPart).sum(); + + logger.debug(format("finished importing model [%s] using [%d] parts", modelId, totalParts)); } } diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java index 4a0fcc5b3d59d..c9d071725efb9 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java +++ 
b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.XContentParser; @@ -34,6 +35,7 @@ import java.security.AccessController; import java.security.MessageDigest; import java.security.PrivilegedAction; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Locale; @@ -46,6 +48,7 @@ import static java.net.HttpURLConnection.HTTP_MOVED_TEMP; import static java.net.HttpURLConnection.HTTP_NOT_FOUND; import static java.net.HttpURLConnection.HTTP_OK; +import static java.net.HttpURLConnection.HTTP_PARTIAL; import static java.net.HttpURLConnection.HTTP_SEE_OTHER; /** @@ -63,23 +66,39 @@ final class ModelLoaderUtils { record VocabularyParts(List vocab, List merges, List scores) {} - static class InputStreamChunker { + // Range in bytes + record RequestRange(long rangeStart, long rangeEnd, int startPart, int numParts) { + public String bytesRange() { + return "bytes=" + rangeStart + "-" + rangeEnd; + } + } + + // This class is not implemented as a Iterator because next() can throw + // but it has a similar interface + static class HttStreamChunker { + + record BytesAndPartIndex(BytesArray bytes, int partIndex) {} private final InputStream inputStream; private final MessageDigest digestSha256 = MessageDigests.sha256(); private final int chunkSize; - private final int totalParts; private final AtomicLong totalBytesRead = new AtomicLong(); - private final AtomicInteger currentPart = new AtomicInteger(-1); + private final AtomicInteger currentPart; + private final int lastPartNumber; - InputStreamChunker(InputStream inputStream, int chunkSize, int totalParts) { + HttStreamChunker(URI uri, RequestRange range, int chunkSize) { + var inputStream = getHttpOrHttpsInputStream(uri, range); this.inputStream = inputStream; this.chunkSize = chunkSize; - this.totalParts = totalParts; + this.lastPartNumber = range.startPart() + range.numParts(); + this.currentPart = new AtomicInteger(range.startPart()); } - public BytesArray next() throws IOException { + public boolean hasNext() { + return currentPart.get() < lastPartNumber; + } + public BytesAndPartIndex next() throws IOException { int bytesRead = 0; byte[] buf = new byte[chunkSize]; @@ -95,10 +114,9 @@ public BytesArray next() throws IOException { if (bytesRead > 0) { digestSha256.update(buf, 0, bytesRead); totalBytesRead.addAndGet(bytesRead); - currentPart.incrementAndGet(); - return new BytesArray(buf, 0, bytesRead); + return new BytesAndPartIndex(new BytesArray(buf, 0, bytesRead), currentPart.getAndIncrement()); } else { - return BytesArray.EMPTY; + return new BytesAndPartIndex(BytesArray.EMPTY, currentPart.get()); } } @@ -110,23 +128,59 @@ public long getTotalBytesRead() { return totalBytesRead.get(); } - public int getTotalParts() { - return totalParts; + public int getCurrentPart() { + return currentPart.get(); + } + } + + static class InputStreamChunker { + + private final InputStream inputStream; + private final MessageDigest digestSha256 = MessageDigests.sha256(); + private final int chunkSize; + + private int totalBytesRead = 0; + + InputStreamChunker(InputStream inputStream, int chunkSize) { + this.inputStream = inputStream; + 
this.chunkSize = chunkSize; + } + + public BytesArray next() throws IOException { + int bytesRead = 0; + byte[] buf = new byte[chunkSize]; + + while (bytesRead < chunkSize) { + int read = inputStream.read(buf, bytesRead, chunkSize - bytesRead); + // EOF?? + if (read == -1) { + break; + } + bytesRead += read; + } + digestSha256.update(buf, 0, bytesRead); + totalBytesRead += bytesRead; + + return new BytesArray(buf, 0, bytesRead); + } + + public String getSha256() { + return MessageDigests.toHexString(digestSha256.digest()); } - public AtomicInteger getCurrentPart() { - return currentPart; + public int getTotalBytesRead() { + return totalBytesRead; } } - static InputStream getInputStreamFromModelRepository(URI uri) throws IOException { + static InputStream getInputStreamFromModelRepository(URI uri) { String scheme = uri.getScheme().toLowerCase(Locale.ROOT); // if you add a scheme here, also add it to the bootstrap check in {@link MachineLearningPackageLoader#validateModelRepository} switch (scheme) { case "http": case "https": - return getHttpOrHttpsInputStream(uri); + return getHttpOrHttpsInputStream(uri, null); case "file": return getFileInputStream(uri); default: @@ -134,6 +188,11 @@ static InputStream getInputStreamFromModelRepository(URI uri) throws IOException } } + static boolean uriIsFile(URI uri) { + String scheme = uri.getScheme().toLowerCase(Locale.ROOT); + return "file".equals(scheme); + } + static VocabularyParts loadVocabulary(URI uri) { if (uri.getPath().endsWith(".json")) { try (InputStream vocabInputStream = getInputStreamFromModelRepository(uri)) { @@ -192,7 +251,7 @@ private ModelLoaderUtils() {} @SuppressWarnings("'java.lang.SecurityManager' is deprecated and marked for removal ") @SuppressForbidden(reason = "we need socket connection to download") - private static InputStream getHttpOrHttpsInputStream(URI uri) { + private static InputStream getHttpOrHttpsInputStream(URI uri, @Nullable RequestRange range) { assert uri.getUserInfo() == null : "URI's with credentials are not supported"; @@ -204,18 +263,30 @@ private static InputStream getHttpOrHttpsInputStream(URI uri) { PrivilegedAction privilegedHttpReader = () -> { try { HttpURLConnection conn = (HttpURLConnection) uri.toURL().openConnection(); + if (range != null) { + conn.setRequestProperty("Range", range.bytesRange()); + } switch (conn.getResponseCode()) { case HTTP_OK: + case HTTP_PARTIAL: return conn.getInputStream(); + case HTTP_MOVED_PERM: case HTTP_MOVED_TEMP: case HTTP_SEE_OTHER: throw new IllegalStateException("redirects aren't supported yet"); case HTTP_NOT_FOUND: throw new ResourceNotFoundException("{} not found", uri); + case 416: // Range not satisfiable, for some reason not in the list of constants + throw new IllegalStateException("Invalid range [" + range.bytesRange() + "]"); default: int responseCode = conn.getResponseCode(); - throw new ElasticsearchStatusException("error during downloading {}", RestStatus.fromCode(responseCode), uri); + throw new ElasticsearchStatusException( + "error during downloading {}. Got response code {}", + RestStatus.fromCode(responseCode), + uri, + responseCode + ); } } catch (IOException e) { throw new UncheckedIOException(e); @@ -250,4 +321,58 @@ private static InputStream getFileInputStream(URI uri) { return AccessController.doPrivileged(privilegedFileReader); } + /** + * Split a stream of size {@code sizeInBytes} into {@code numberOfStreams} +1 + * ranges aligned on {@code chunkSizeBytes} boundaries. Each range contains a + * whole number of chunks. 
+ * The first {@code numberOfStreams} ranges will be split evenly (in terms of + * number of chunks not the byte size), the final range split + * is for the single final chunk and will be {@code chunkSizeBytes} in size. + * + * This odd behaviour is because when streaming and uploading a large model + * definition writing the last part has to handled as a special case. + * + * @param sizeInBytes The total size of the stream + * @param numberOfStreams Divide the bulk of the size into this many streams. + * @param chunkSizeBytes The size of each chunk + * @return List of {@code numberOfStreams} + 1 ranges. + */ + static List split(long sizeInBytes, int numberOfStreams, long chunkSizeBytes) { + int numberOfChunks = (int) ((sizeInBytes + chunkSizeBytes - 1) / chunkSizeBytes); + + if (numberOfStreams == 1) { + return List.of(new RequestRange(0, sizeInBytes, 0, numberOfChunks)); + } + + var ranges = new ArrayList(); + + int baseChunksPerStream = numberOfChunks / numberOfStreams; + int remainder = numberOfChunks % numberOfStreams; + long startOffset = 0; + int startChunkIndex = 0; + + for (int i = 0; i < numberOfStreams - 1; i++) { + int numChunksInStream = (i < remainder) ? baseChunksPerStream + 1 : baseChunksPerStream; + long rangeEnd = startOffset + (numChunksInStream * chunkSizeBytes) - 1; // range index is 0 based + ranges.add(new RequestRange(startOffset, rangeEnd, startChunkIndex, numChunksInStream)); + startOffset = rangeEnd + 1; // range is inclusive start and end + startChunkIndex += numChunksInStream; + } + + // Want the final range request to be a single chunk + if (baseChunksPerStream > 1) { + int numChunksExcludingFinal = baseChunksPerStream - 1; + long rangeEnd = startOffset + (numChunksExcludingFinal * chunkSizeBytes) - 1; + ranges.add(new RequestRange(startOffset, rangeEnd, startChunkIndex, numChunksExcludingFinal)); + + startOffset = rangeEnd + 1; + startChunkIndex += numChunksExcludingFinal; + } + + // The final range is a single chunk and should not exceed the + long rangeEnd = Math.min(sizeInBytes, startOffset + (baseChunksPerStream * chunkSizeBytes)) - 1; + ranges.add(new RequestRange(startOffset, rangeEnd, startChunkIndex, 1)); + + return ranges; + } } diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoaderTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoaderTests.java index eb9b212935f24..2e487b6a9624c 100644 --- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoaderTests.java +++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoaderTests.java @@ -90,6 +90,6 @@ public void testThreadPoolHasSingleThread() { List> settings = fixedThreadPool.getRegisteredSettings(); var sizeSettting = settings.stream().filter(s -> s.getKey().startsWith("xpack.ml.model_download_thread_pool")).findFirst(); assertTrue(sizeSettting.isPresent()); - assertEquals(1, sizeSettting.get().get(Settings.EMPTY)); + assertEquals(5, sizeSettting.get().get(Settings.EMPTY)); } } diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java index 59c6f0f63e04e..af3d24837c735 100644 --- 
a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java +++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ml.packageloader.action; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.support.ActionTestUtils; @@ -15,7 +14,6 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelDefinitionPartAction; @@ -25,19 +23,14 @@ import org.junit.After; import org.junit.Before; -import java.io.ByteArrayInputStream; -import java.io.IOException; +import java.net.URISyntaxException; import java.util.List; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; -import static org.hamcrest.Matchers.containsString; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -56,7 +49,7 @@ public void closeThreadPool() { threadPool.close(); } - public void testDownload() throws InterruptedException { + public void testDownload() throws InterruptedException, URISyntaxException { var client = mockClient(false); var task = ModelDownloadTaskTests.testTask(); var config = mock(ModelPackageConfig.class); @@ -67,7 +60,6 @@ public void testDownload() throws InterruptedException { int chunkSize = 10; long size = totalParts * chunkSize; var modelDef = modelDefinition(totalParts, chunkSize); - var stream = mockStreamChunker(modelDef, totalParts, chunkSize); var digest = computeDigest(modelDef, totalParts, chunkSize); when(config.getSha256()).thenReturn(digest); @@ -75,15 +67,15 @@ public void testDownload() throws InterruptedException { var latch = new CountDownLatch(1); var latchedListener = new LatchedActionListener(ActionTestUtils.assertNoFailureListener(ignore -> {}), latch); - importer.downloadParts(stream, size, vocab, latchedListener); + importer.downloadModelDefinition(size, vocab, latchedListener); latch.await(); verify(client, times(totalParts)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any()); assertEquals(totalParts - 1, task.getStatus().downloadProgress().downloadedParts()); assertEquals(totalParts, task.getStatus().downloadProgress().totalParts()); } - - public void testSizeMismatch() throws InterruptedException { + /* + public void testSizeMismatch() throws InterruptedException, URISyntaxException { var client = mockClient(false); var task = mock(ModelDownloadTask.class); var config = mock(ModelPackageConfig.class); @@ -223,6 +215,7 @@ private ModelLoaderUtils.InputStreamChunker mockStreamChunker(byte[] modelDef, i var modelDefStream = new ByteArrayInputStream(modelDef); return new ModelLoaderUtils.InputStreamChunker(modelDefStream, chunkSize, totalPart); } + */ private byte[] modelDefinition(int 
totalParts, int chunkSize) { var bytes = new byte[totalParts * chunkSize]; diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtilsTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtilsTests.java index 1979ed7ca5ab4..f421a7b44e7f1 100644 --- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtilsTests.java +++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtilsTests.java @@ -17,6 +17,7 @@ import java.nio.charset.StandardCharsets; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.core.Is.is; public class ModelLoaderUtilsTests extends ESTestCase { @@ -84,8 +85,7 @@ public void testSha256AndSize() throws IOException { ModelLoaderUtils.InputStreamChunker inputStreamChunker = new ModelLoaderUtils.InputStreamChunker( new ByteArrayInputStream(bytes), - chunkSize, - totalParts + chunkSize ); for (int part = 0; part < totalParts - 1; ++part) { @@ -112,4 +112,40 @@ public void testParseVocabulary() throws IOException { assertThat(parsedVocab.merges(), contains("mergefoo", "mergebar", "mergebaz")); assertThat(parsedVocab.scores(), contains(1.0, 2.0, 3.0)); } + + public void testSplitIntoRanges() { + long totalSize = randomLongBetween(10_000, 50_000_000); + int numStreams = randomIntBetween(1, 10); + int chunkSize = 1024; + var ranges = ModelLoaderUtils.split(totalSize, numStreams, chunkSize); + assertThat(ranges, hasSize(numStreams + 1)); + + int expectedNumChunks = (int) ((totalSize + chunkSize - 1) / chunkSize); + assertThat(ranges.stream().mapToInt(ModelLoaderUtils.RequestRange::numParts).sum(), is(expectedNumChunks)); + + long startBytes = 0; + int startPartIndex = 0; + for (int i = 0; i < ranges.size() - 1; i++) { + assertThat(ranges.get(i).rangeStart(), is(startBytes)); + long end = startBytes + ((long) ranges.get(i).numParts() * chunkSize) - 1; + assertThat(ranges.get(i).rangeEnd(), is(end)); + long expectedNumBytesInRange = (long) chunkSize * ranges.get(i).numParts() - 1; + assertThat(ranges.get(i).rangeEnd() - ranges.get(i).rangeStart(), is(expectedNumBytesInRange)); + assertThat(ranges.get(i).startPart(), is(startPartIndex)); + + startBytes = end + 1; + startPartIndex += ranges.get(i).numParts(); + } + + var finalRange = ranges.get(ranges.size() - 1); + assertThat(finalRange.rangeStart(), is(startBytes)); + assertThat(finalRange.rangeEnd(), is(totalSize - 1)); + assertThat(finalRange.numParts(), is(1)); + } + + public void testRangeRequestBytesRange() { + long start = randomLongBetween(0, 2 << 10); + long end = randomLongBetween(start + 1, 2 << 11); + assertEquals("bytes=" + start + "-" + end, new ModelLoaderUtils.RequestRange(start, end, 0, 1).bytesRange()); + } } From 1d9f5b53358751e7966ce1c750905e512b31c24c Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 9 Sep 2024 15:10:31 +0100 Subject: [PATCH 18/58] Use multiple connections --- .../MachineLearningPackageLoader.java | 4 +- .../packageloader/action/ModelImporter.java | 155 ++++++++++++------ .../action/ModelLoaderUtils.java | 29 ++-- .../action/ModelImporterTests.java | 142 +++++++++++----- 4 files changed, 216 insertions(+), 114 deletions(-) diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java 
b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java
index a9c18d3358900..efe09f5060958 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java
@@ -96,8 +96,8 @@ public static FixedExecutorBuilder modelDownloadExecutor(Settings settings) {
         return new FixedExecutorBuilder(
             settings,
             MODEL_DOWNLOAD_THREADPOOL_NAME,
-            ModelImporter.MAX_IN_FLIGHT_REQUESTS,
-            50, // max items in queue
+            ModelImporter.NUMBER_OF_STREAMS,
+            ModelImporter.NUMBER_OF_STREAMS * 2, // max items in queue
             "xpack.ml.model_download_thread_pool",
             EsExecutors.TaskTrackingConfig.DO_NOT_TRACK
         );
diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
index 15566b6641b40..e2f30e035be1e 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
@@ -24,6 +24,7 @@
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ModelPackageConfig;
 import org.elasticsearch.xpack.ml.packageloader.MachineLearningPackageLoader;
 
+import java.io.InputStream;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
@@ -36,20 +37,23 @@ import static org.elasticsearch.core.Strings.format;
 
 /**
- * For downloading and the vocabulary and model definition file and
+ * For downloading the vocabulary and model definition files and
  * indexing those files in Elasticsearch.
  * Holding the large model definition file in memory will consume
  * too much memory, instead it is streamed in chunks and each chunk
- * written to the index in a non-blocking request. The number of
- * index requests is limited to {@link #MAX_IN_FLIGHT_REQUESTS}
- * also to prevent too much memory being used.
- * Only 1 thread can read the model definition stream at a time,
- * this is ensured by using a fixed size threadpool with a single
- * thread.
+ * written to the index in a non-blocking request.
+ * The model files may be installed from a local file or downloaded
+ * from a server. The server download uses {@link #NUMBER_OF_STREAMS}
+ * connections, each using the Range header to split the stream by byte
+ * range. There is a complication in that the final part of the model
+ * definition must be uploaded last, as writing this part causes an index
+ * refresh.
+ * When read from a file, a single thread is used to read the file
+ * stream, split it into chunks, and index those chunks.
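+ * <p>As a rough sketch of the server download flow described above
+ * (illustrative only; {@code streamAndIndex} is an invented name, not a
+ * method of this class):
+ * <pre>{@code
+ * var ranges = ModelLoaderUtils.split(size, NUMBER_OF_STREAMS, DEFAULT_CHUNK_SIZE);
+ * // all but the last range are streamed and indexed concurrently
+ * for (var range : ranges.subList(0, ranges.size() - 1)) {
+ *     executorService.execute(() -> streamAndIndex(range));
+ * }
+ * // the last range holds only the final chunk; it is written after the
+ * // others complete because indexing it triggers an index refresh
+ * }</pre>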
*/ public class ModelImporter { private static final int DEFAULT_CHUNK_SIZE = 1024 * 1024; // 1MB - public static final int MAX_IN_FLIGHT_REQUESTS = 5; + public static final int NUMBER_OF_STREAMS = 5; private static final Logger logger = LogManager.getLogger(ModelImporter.class); private final Client client; private final String modelId; @@ -74,7 +78,6 @@ public class ModelImporter { } public void doImport(ActionListener listener) { - // todo file import executorService.execute(() -> doImportInternal(listener)); } @@ -94,28 +97,38 @@ private void doImportInternal(ActionListener finalListener ModelLoaderUtils.resolvePackageLocation(config.getModelRepository(), config.getVocabularyFile()) ); } + + // simple round up + int totalParts = (int) ((config.getSize() + DEFAULT_CHUNK_SIZE - 1) / DEFAULT_CHUNK_SIZE); + + if (ModelLoaderUtils.uriIsFile(uri) == false) { + var ranges = ModelLoaderUtils.split(config.getSize(), NUMBER_OF_STREAMS, DEFAULT_CHUNK_SIZE); + var downloaders = new ArrayList(ranges.size()); + for (var range : ranges) { + downloaders.add(new ModelLoaderUtils.HttStreamChunker(uri, range, DEFAULT_CHUNK_SIZE)); + } + downloadModelDefinition(config.getSize(), totalParts, vocabularyParts, downloaders, finalListener); + } else { + InputStream modelInputStream = ModelLoaderUtils.getFileInputStream(uri); + ModelLoaderUtils.InputStreamChunker chunkIterator = new ModelLoaderUtils.InputStreamChunker( + modelInputStream, + DEFAULT_CHUNK_SIZE + ); + readModelDefinitionFromFile(config.getSize(), totalParts, chunkIterator, vocabularyParts, finalListener); + } } catch (Exception e) { finalListener.onFailure(e); return; } - - downloadModelDefinition(config.getSize(), vocabularyParts, finalListener); } void downloadModelDefinition( long size, + int totalParts, @Nullable ModelLoaderUtils.VocabularyParts vocabularyParts, + List downloaders, ActionListener finalListener ) { - // simple round up - int totalParts = (int) ((size + DEFAULT_CHUNK_SIZE - 1) / DEFAULT_CHUNK_SIZE); - var ranges = ModelLoaderUtils.split(size, MAX_IN_FLIGHT_REQUESTS, DEFAULT_CHUNK_SIZE); - - var downloaders = new ArrayList(); - for (var range : ranges) { - downloaders.add(new ModelLoaderUtils.HttStreamChunker(uri, range, DEFAULT_CHUNK_SIZE)); - } - try (var countingListener = new RefCountingListener(1, ActionListener.wrap(ignore -> executorService.execute(() -> { var finalDownloader = downloaders.get(downloaders.size() - 1); downloadFinalPart(size, totalParts, finalDownloader, finalListener.delegateFailureAndWrap((l, r) -> { @@ -126,12 +139,12 @@ void downloadModelDefinition( // Uploading other artefacts of the model first, that way the model is last and a simple search can be used to check if the // download is complete if (vocabularyParts != null) { - uploadVocabulary(vocabularyParts, countingListener.acquire(r -> { - logger.info(() -> format("[%s] imported model vocabulary [%s]", modelId, config.getVocabularyFile())); - })); + uploadVocabulary(vocabularyParts, countingListener); } - for (int streamSplit = 0; streamSplit < MAX_IN_FLIGHT_REQUESTS; ++streamSplit) { + // Download all but the final split. 
+ // The final split is a single chunk + for (int streamSplit = 0; streamSplit < downloaders.size() - 1; ++streamSplit) { final var downloader = downloaders.get(streamSplit); var doLast = countingListener.acquire(); executorService.execute(() -> downloadPartsInRange(size, totalParts, downloader, countingListener, doLast)); @@ -156,25 +169,18 @@ private void downloadPartsInRange( while (downloadChunker.hasNext()) { if (countingListener.isFailing()) { if (listenerIsClosed.compareAndSet(false, true)) { - logger.info("is failing"); countingListener.close(); } return; } - if (task.isCancelled()) { - logger.info("task cancelled"); - throw new TaskCancelledException(format("task cancelled with reason [%s]", task.getReasonCancelled())); - } - try { + throwIfTaskCancelled(); var bytesAndIndex = downloadChunker.next(); task.setProgress(totalParts, progressCounter.getAndIncrement()); - logger.info("Progress " + progressCounter.get() + " , " + totalParts); indexPart(bytesAndIndex.partIndex(), totalParts, size, bytesAndIndex.bytes(), countingListener.acquire(ack -> {})); } catch (Exception e) { - logger.info("errr", e); countingListener.acquire().onFailure(e); if (listenerIsClosed.compareAndSet(false, true)) { countingListener.close(); @@ -182,7 +188,6 @@ private void downloadPartsInRange( } } - logger.info("split complete " + downloadChunker.getCurrentPart()); doLast.onResponse(null); } @@ -199,8 +204,6 @@ private void downloadFinalPart( Thread.currentThread().getName() ); - logger.info("final part"); - try { var bytesAndIndex = downloader.next(); task.setProgress(totalParts, progressCounter.getAndIncrement()); @@ -211,7 +214,37 @@ private void downloadFinalPart( } } - private void uploadVocabulary(ModelLoaderUtils.VocabularyParts vocabularyParts, ActionListener listener) { + void readModelDefinitionFromFile( + long size, + int totalParts, + ModelLoaderUtils.InputStreamChunker chunkIterator, + @Nullable ModelLoaderUtils.VocabularyParts vocabularyParts, + ActionListener finalListener + ) { + try (var countingListener = new RefCountingListener(1, ActionListener.wrap(ignore -> executorService.execute(() -> { + finalListener.onResponse(AcknowledgedResponse.TRUE); + }), finalListener::onFailure))) { + try { + if (vocabularyParts != null) { + uploadVocabulary(vocabularyParts, countingListener); + } + + for (int part = 0; part < totalParts; ++part) { + throwIfTaskCancelled(); + task.setProgress(totalParts, part); + BytesArray definition = chunkIterator.next(); + indexPart(part, totalParts, size, definition, countingListener.acquire(ack -> {})); + } + task.setProgress(totalParts, totalParts); + + checkDownloadComplete(chunkIterator, totalParts); + } catch (Exception e) { + countingListener.acquire().onFailure(e); + } + } + } + + private void uploadVocabulary(ModelLoaderUtils.VocabularyParts vocabularyParts, RefCountingListener countingListener) { PutTrainedModelVocabularyAction.Request request = new PutTrainedModelVocabularyAction.Request( modelId, vocabularyParts.vocab(), @@ -220,7 +253,9 @@ private void uploadVocabulary(ModelLoaderUtils.VocabularyParts vocabularyParts, true ); - client.execute(PutTrainedModelVocabularyAction.INSTANCE, request, listener); + client.execute(PutTrainedModelVocabularyAction.INSTANCE, request, countingListener.acquire(r -> { + logger.debug(() -> format("[%s] imported model vocabulary [%s]", modelId, config.getVocabularyFile())); + })); } private void indexPart(int partIndex, int totalParts, long totalSize, BytesArray bytes, ActionListener listener) { @@ -237,25 +272,39 @@ private 
@@ -237,25 +272,39 @@ private void indexPart(int partIndex, int totalParts, long totalSize, BytesArray
     }

     private void checkDownloadComplete(List downloaders) {
-        // if (config.getSha256().equals(chunkIterator.getSha256()) == false) {
-        //     String message = format(
-        //         "Model sha256 checksums do not match, expected [%s] but got [%s]",
-        //         config.getSha256(),
-        //         chunkIterator.getSha256()
-        //     );
-        //
-        //     throw new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR);
-        // }
-
-        long readSize = downloaders.stream().mapToLong(ModelLoaderUtils.HttStreamChunker::getTotalBytesRead).sum();
-
-        if (config.getSize() != readSize) {
-            String message = format("Model size does not match, expected [%d] but got [%d]", config.getSize(), readSize);
+        long totalBytesRead = downloaders.stream().mapToLong(ModelLoaderUtils.HttStreamChunker::getTotalBytesRead).sum();
+        int totalParts = downloaders.stream().mapToInt(ModelLoaderUtils.HttStreamChunker::getCurrentPart).sum();
+        checkSize(totalBytesRead);
+        logger.debug(format("finished importing model [%s] using [%d] parts", modelId, totalParts));
+    }
+
+    private void checkDownloadComplete(ModelLoaderUtils.InputStreamChunker fileInputStream, int totalParts) {
+        checkSha256(fileInputStream.getSha256());
+        checkSize(fileInputStream.getTotalBytesRead());
+        logger.debug(format("finished importing model [%s] using [%d] parts", modelId, totalParts));
+    }
+
+    private void checkSha256(String sha256) {
+        if (config.getSha256().equals(sha256) == false) {
+            String message = format("Model sha256 checksums do not match, expected [%s] but got [%s]", config.getSha256(), sha256);
+            throw new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR);
         }
+    }

-        int totalParts = downloaders.stream().mapToInt(ModelLoaderUtils.HttStreamChunker::getCurrentPart).sum();
+    private void checkSize(long definitionSize) {
+        if (config.getSize() != definitionSize) {
+            String message = format("Model size does not match, expected [%d] but got [%d]", config.getSize(), definitionSize);
+            throw new ElasticsearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR);
+        }
+    }

-        logger.debug(format("finished importing model [%s] using [%d] parts", modelId, totalParts));
+    private void throwIfTaskCancelled() {
+        if (task.isCancelled()) {
+            logger.info("Model [{}] download task cancelled", modelId);
+            throw new TaskCancelledException(
+                format("Model [%s] download task cancelled with reason [%s]", modelId, task.getReasonCancelled())
+            );
+        }
     }
 }
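The checkSha256 and checkSize helpers above gate the import on the digest and byte count recorded in the package metadata. For readers who want that verification step in isolation, here is a minimal JDK-only sketch of the same idea; the class name, exception type and buffer size are inventions for the example, not part of the patch:

    import java.io.IOException;
    import java.io.InputStream;
    import java.security.DigestInputStream;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;
    import java.util.HexFormat;

    class DefinitionCheck {
        // Streams the input to EOF, then verifies the SHA-256 digest and the byte count.
        static long verify(InputStream in, String expectedSha256, long expectedSize) throws IOException, NoSuchAlgorithmException {
            MessageDigest digest = MessageDigest.getInstance("SHA-256");
            long read = 0;
            try (DigestInputStream din = new DigestInputStream(in, digest)) {
                byte[] buf = new byte[8192];
                int n;
                while ((n = din.read(buf)) != -1) {
                    read += n; // the DigestInputStream updates the digest as a side effect
                }
            }
            String sha256 = HexFormat.of().formatHex(digest.digest());
            if (sha256.equals(expectedSha256) == false) {
                throw new IllegalStateException("sha256 mismatch: expected [" + expectedSha256 + "] but got [" + sha256 + "]");
            }
            if (read != expectedSize) {
                throw new IllegalStateException("size mismatch: expected [" + expectedSize + "] but got [" + read + "]");
            }
            return read;
        }
    }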
diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java
index c9d071725efb9..5c04863d7a3ae 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java
@@ -73,14 +73,11 @@ public String bytesRange() {
         }
     }

-    // This class is not implemented as a Iterator because next() can throw
-    // but it has a similar interface
     static class HttStreamChunker {

         record BytesAndPartIndex(BytesArray bytes, int partIndex) {}

         private final InputStream inputStream;
-        private final MessageDigest digestSha256 = MessageDigests.sha256();
         private final int chunkSize;
         private final AtomicLong totalBytesRead = new AtomicLong();
         private final AtomicInteger currentPart;
@@ -94,6 +91,14 @@ record BytesAndPartIndex(BytesArray bytes, int partIndex) {}
             this.currentPart = new AtomicInteger(range.startPart());
         }

+        // This ctor exists for testing purposes only.
+        HttStreamChunker(InputStream inputStream, RequestRange range, int chunkSize) {
+            this.inputStream = inputStream;
+            this.chunkSize = chunkSize;
+            this.lastPartNumber = range.startPart() + range.numParts();
+            this.currentPart = new AtomicInteger(range.startPart());
+        }
+
         public boolean hasNext() {
             return currentPart.get() < lastPartNumber;
         }
@@ -112,7 +117,6 @@ public BytesAndPartIndex next() throws IOException {
                 }

                 if (bytesRead > 0) {
-                    digestSha256.update(buf, 0, bytesRead);
                     totalBytesRead.addAndGet(bytesRead);
                     return new BytesAndPartIndex(new BytesArray(buf, 0, bytesRead), currentPart.getAndIncrement());
                 } else {
@@ -120,10 +124,6 @@ public BytesAndPartIndex next() throws IOException {
             }
         }

-        public String getSha256() {
-            return MessageDigests.toHexString(digestSha256.digest());
-        }
-
         public long getTotalBytesRead() {
             return totalBytesRead.get();
         }
@@ -298,7 +298,7 @@ private static InputStream getHttpOrHttpsInputStream(URI uri, @Nullable RequestR
     @SuppressWarnings("'java.lang.SecurityManager' is deprecated and marked for removal ")
     @SuppressForbidden(reason = "we need load model data from a file")
-    private static InputStream getFileInputStream(URI uri) {
+    static InputStream getFileInputStream(URI uri) {

         SecurityManager sm = System.getSecurityManager();
         if (sm != null) {
@@ -328,10 +328,9 @@ private static InputStream getFileInputStream(URI uri) {
      * The first {@code numberOfStreams} ranges will be split evenly (in terms of
      * number of chunks not the byte size), the final range split
      * is for the single final chunk and will be {@code chunkSizeBytes} in size.
-     *
-     * This odd behaviour is because when streaming and uploading a large model
-     * definition writing the last part has to handled
-     * as a special case.
-     *
+     * The separate range for the final chunk is because when streaming and
+     * uploading a large model definition, writing the last part has to be handled
+     * as a special case.
      * @param sizeInBytes The total size of the stream
     * @param numberOfStreams Divide the bulk of the size into this many streams.
     * @param chunkSizeBytes The size of each chunk
@@ -340,10 +339,6 @@ private static InputStream getFileInputStream(URI uri) {
     static List split(long sizeInBytes, int numberOfStreams, long chunkSizeBytes) {
         int numberOfChunks = (int) ((sizeInBytes + chunkSizeBytes - 1) / chunkSizeBytes);

-        if (numberOfStreams == 1) {
-            return List.of(new RequestRange(0, sizeInBytes, 0, numberOfChunks));
-        }
-
         var ranges = new ArrayList();

         int baseChunksPerStream = numberOfChunks / numberOfStreams;
diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java
index af3d24837c735..fa9398955c271 100644
--- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java
+++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java
@@ -7,6 +7,7 @@
 package org.elasticsearch.xpack.ml.packageloader.action;

+import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.LatchedActionListener;
 import org.elasticsearch.action.support.ActionTestUtils;
@@ -14,6 +15,7 @@
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.common.hash.MessageDigests;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.TestThreadPool;
 import org.elasticsearch.xpack.core.ml.action.PutTrainedModelDefinitionPartAction;
@@ -23,14 +25,20 @@
 import org.junit.After;
 import org.junit.Before;

+import java.io.ByteArrayInputStream;
+import java.io.IOException;
 import java.net.URISyntaxException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicReference;

+import static org.hamcrest.Matchers.containsString;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
@@ -49,45 +57,74 @@ public void closeThreadPool() {
         threadPool.close();
     }

-    public void testDownload() throws InterruptedException, URISyntaxException {
+    public void testDownloadModelDefinition() throws InterruptedException, URISyntaxException {
         var client = mockClient(false);
         var task = ModelDownloadTaskTests.testTask();
-        var config = mock(ModelPackageConfig.class);
+        var config = mockConfigWithRepoLinks();
         var vocab = new ModelLoaderUtils.VocabularyParts(List.of(), List.of(), List.of());

-        var importer = new ModelImporter(client, "foo", config, task, threadPool);
         int totalParts = 5;
         int chunkSize = 10;
         long size = totalParts * chunkSize;
         var modelDef = modelDefinition(totalParts, chunkSize);
+        var streamers = mockHttpStreamChunkers(modelDef, chunkSize, 2);

-        var digest = computeDigest(modelDef, totalParts, chunkSize);
+        var digest = computeDigest(modelDef);
         when(config.getSha256()).thenReturn(digest);
         when(config.getSize()).thenReturn(size);

+        var importer = new ModelImporter(client, "foo", config, task, threadPool);
+
         var latch = new CountDownLatch(1);
         var latchedListener = new LatchedActionListener(ActionTestUtils.assertNoFailureListener(ignore -> {}), latch);
-        importer.downloadModelDefinition(size, vocab, latchedListener);
+        importer.downloadModelDefinition(size, totalParts, vocab, streamers, latchedListener);

         latch.await();
         verify(client, times(totalParts)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any());
         assertEquals(totalParts - 1, task.getStatus().downloadProgress().downloadedParts());
         assertEquals(totalParts, task.getStatus().downloadProgress().totalParts());
     }

-    /*
+
+    public void testReadModelDefinitionFromFile() throws InterruptedException, URISyntaxException {
+        var client = mockClient(false);
+        var task = ModelDownloadTaskTests.testTask();
+        var config = mockConfigWithRepoLinks();
+        var vocab = new ModelLoaderUtils.VocabularyParts(List.of(), List.of(), List.of());
+
+        int totalParts = 3;
+        int chunkSize = 10;
+        long size = totalParts * chunkSize;
+        var modelDef = modelDefinition(totalParts, chunkSize);
+
+        var digest = computeDigest(modelDef);
+        when(config.getSha256()).thenReturn(digest);
+        when(config.getSize()).thenReturn(size);
+
+        var importer = new ModelImporter(client, "foo", config, task, threadPool);
+        var streamChunker = new ModelLoaderUtils.InputStreamChunker(new ByteArrayInputStream(modelDef), chunkSize);
+
+        var latch = new CountDownLatch(1);
+        var latchedListener = new LatchedActionListener(ActionTestUtils.assertNoFailureListener(ignore -> {}), latch);
+        importer.readModelDefinitionFromFile(size, totalParts, streamChunker, vocab, latchedListener);
+
+        latch.await();
+        verify(client, times(totalParts)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any());
+        assertEquals(totalParts, task.getStatus().downloadProgress().downloadedParts());
+        assertEquals(totalParts, task.getStatus().downloadProgress().totalParts());
+    }
+
     public void testSizeMismatch() throws InterruptedException, URISyntaxException {
         var client = mockClient(false);
         var task = mock(ModelDownloadTask.class);
-        var config = mock(ModelPackageConfig.class);
+        var config = mockConfigWithRepoLinks();

-        var importer = new ModelImporter(client, "foo", config, task, threadPool);
         int totalParts = 5;
         int chunkSize = 10;
         long size = totalParts * chunkSize;
         var modelDef = modelDefinition(totalParts, chunkSize);
-        var stream = mockStreamChunker(modelDef, totalParts, chunkSize);
+        var streamers = mockHttpStreamChunkers(modelDef, chunkSize, 2);

-        var digest = computeDigest(modelDef, totalParts, chunkSize);
+        var digest = computeDigest(modelDef);
         when(config.getSha256()).thenReturn(digest);
         when(config.getSize()).thenReturn(size - 1); // expected size and read size are different
@@ -98,24 +135,25 @@ public void testSizeMismatch() throws InterruptedException, URISyntaxException {
             ActionTestUtils.assertNoSuccessListener(exceptionHolder::set),
             latch
         );
-        importer.downloadParts(stream, size, null, latchedListener);
+
+        var importer = new ModelImporter(client, "foo", config, task, threadPool);
+        importer.downloadModelDefinition(size, totalParts, null, streamers, latchedListener);

         latch.await();
         assertThat(exceptionHolder.get().getMessage(), containsString("Model size does not match"));
         verify(client, times(totalParts)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any());
     }

-    public void testDigestMismatch() throws InterruptedException {
+    public void testDigestMismatch() throws InterruptedException, URISyntaxException {
         var client = mockClient(false);
         var task = mock(ModelDownloadTask.class);
-        var config = mock(ModelPackageConfig.class);
+        var config = mockConfigWithRepoLinks();

-        var importer = new ModelImporter(client, "foo", config, task, threadPool);
         int totalParts = 5;
         int chunkSize = 10;
         long size = totalParts * chunkSize;
         var modelDef = modelDefinition(totalParts, chunkSize);
-        var stream = mockStreamChunker(modelDef, totalParts, chunkSize);
+        var streamers = mockHttpStreamChunkers(modelDef, chunkSize, 2);

         when(config.getSha256()).thenReturn("0x"); // digest is different
         when(config.getSize()).thenReturn(size);
@@ -126,23 +164,27 @@ public void testDigestMismatch() throws InterruptedException {
             ActionTestUtils.assertNoSuccessListener(exceptionHolder::set),
             latch
         );
-        importer.downloadParts(stream, size, null, latchedListener);
+
+        var importer = new ModelImporter(client, "foo", config, task, threadPool);
+        // Message digest can only be calculated for the file reader
+        var streamChunker = new ModelLoaderUtils.InputStreamChunker(new ByteArrayInputStream(modelDef), chunkSize);
+        importer.readModelDefinitionFromFile(size, totalParts, streamChunker, null, latchedListener);

         latch.await();
         assertThat(exceptionHolder.get().getMessage(), containsString("Model sha256 checksums do not match"));
         verify(client, times(totalParts)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any());
     }

-    public void testPutFailure() throws InterruptedException {
+    public void testPutFailure() throws InterruptedException, URISyntaxException {
         var client = mockClient(true); // client will fail put
         var task = mock(ModelDownloadTask.class);
-        var config = mock(ModelPackageConfig.class);
+        var config = mockConfigWithRepoLinks();

-        var importer = new ModelImporter(client, "foo", config, task, threadPool);
         int totalParts = 4;
         int chunkSize = 10;
+        long size = totalParts * chunkSize;
         var modelDef = modelDefinition(totalParts, chunkSize);
-        var stream = mockStreamChunker(modelDef, totalParts, chunkSize);
+        var streamers = mockHttpStreamChunkers(modelDef, chunkSize, 1);

         var exceptionHolder = new AtomicReference();
         var latch = new CountDownLatch(1);
@@ -150,24 +192,27 @@ public void testPutFailure() throws InterruptedException {
             ActionTestUtils.assertNoSuccessListener(exceptionHolder::set),
             latch
         );
-        importer.downloadParts(stream, totalParts * chunkSize, null, latchedListener);
+
+        var importer = new ModelImporter(client, "foo", config, task, threadPool);
+        importer.downloadModelDefinition(size, totalParts, null, streamers, latchedListener);

         latch.await();
         assertThat(exceptionHolder.get().getMessage(), containsString("put model part failed"));
         verify(client, times(1)).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any());
     }

-    public void testReadFailure() throws IOException, InterruptedException {
+    public void testReadFailure() throws IOException, InterruptedException, URISyntaxException {
         var client = mockClient(true);
         var task = mock(ModelDownloadTask.class);
-        var config = mock(ModelPackageConfig.class);
+        var config = mockConfigWithRepoLinks();

-        var importer = new ModelImporter(client, "foo", config, task, threadPool);
-        var stream = mock(ModelLoaderUtils.InputStreamChunker.class);
-        when(stream.next()).thenThrow(new IOException("stream failed")); // fail the read
+        int totalParts = 4;
+        int chunkSize = 10;
+        long size = totalParts * chunkSize;

-        when(stream.getTotalParts()).thenReturn(10);
-        when(stream.getCurrentPart()).thenReturn(new AtomicInteger());
+        var streamer = mock(ModelLoaderUtils.HttStreamChunker.class);
+        when(streamer.hasNext()).thenReturn(true);
+        when(streamer.next()).thenThrow(new IOException("stream failed")); // fail the read

         var exceptionHolder = new AtomicReference();
         var latch = new CountDownLatch(1);
@@ -175,14 +220,16 @@ public void testReadFailure() throws IOException, InterruptedException {
             ActionTestUtils.assertNoSuccessListener(exceptionHolder::set),
             latch
         );
-        importer.downloadParts(stream, 1L, null, latchedListener);
+
+        var importer = new ModelImporter(client, "foo", config, task, threadPool);
+        importer.downloadModelDefinition(size, totalParts, null, List.of(streamer), latchedListener);

         latch.await();
         assertThat(exceptionHolder.get().getMessage(), containsString("stream failed"));
     }

     @SuppressWarnings("unchecked")
-    public void testUploadVocabFailure() throws InterruptedException {
+    public void testUploadVocabFailure() throws InterruptedException, URISyntaxException {
         var client = mock(Client.class);
         doAnswer(invocation -> {
             ActionListener listener = (ActionListener) invocation.getArguments()[2];
@@ -191,19 +238,19 @@
         }).when(client).execute(eq(PutTrainedModelVocabularyAction.INSTANCE), any(), any());

         var task = mock(ModelDownloadTask.class);
-        var config = mock(ModelPackageConfig.class);
+        var config = mockConfigWithRepoLinks();
         var vocab = new ModelLoaderUtils.VocabularyParts(List.of(), List.of(), List.of());

-        var importer = new ModelImporter(client, "foo", config, task, threadPool);
-        var stream = mock(ModelLoaderUtils.InputStreamChunker.class);
         var exceptionHolder = new AtomicReference();
         var latch = new CountDownLatch(1);
         var latchedListener = new LatchedActionListener(
             ActionTestUtils.assertNoSuccessListener(exceptionHolder::set),
             latch
         );
-        importer.downloadParts(stream, 1L, vocab, latchedListener);
+
+        var importer = new ModelImporter(client, "foo", config, task, threadPool);
+        importer.downloadModelDefinition(100, 5, vocab, List.of(), latchedListener);

         latch.await();
         assertThat(exceptionHolder.get().getMessage(), containsString("put vocab failed"));
@@ -211,11 +258,18 @@
         verify(client, never()).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any());
     }

-    private ModelLoaderUtils.InputStreamChunker mockStreamChunker(byte[] modelDef, int totalPart, int chunkSize) {
-        var modelDefStream = new ByteArrayInputStream(modelDef);
-        return new ModelLoaderUtils.InputStreamChunker(modelDefStream, chunkSize, totalPart);
+    private List mockHttpStreamChunkers(byte[] modelDef, int chunkSize, int numStreams) {
+        var ranges = ModelLoaderUtils.split(modelDef.length, numStreams, chunkSize);
+
+        var result = new ArrayList(ranges.size());
+        for (var range : ranges) {
+            int len = range.numParts() * chunkSize;
+            var modelDefStream = new ByteArrayInputStream(modelDef, (int) range.rangeStart(), len);
+            result.add(new ModelLoaderUtils.HttStreamChunker(modelDefStream, range, chunkSize));
+        }
+
+        return result;
     }
-    */

     private byte[] modelDefinition(int totalParts, int chunkSize) {
         var bytes = new byte[totalParts * chunkSize];
@@ -225,12 +279,9 @@ private byte[] modelDefinition(int totalParts, int chunkSize) {
         return bytes;
     }

-    private String computeDigest(byte[] modelDef, int totalParts, int chunkSize) {
+    private String computeDigest(byte[] modelDef) {
         var digest = MessageDigests.sha256();
         digest.update(modelDef);
-        // for (int i=0; i
Date: Tue, 10 Sep 2024 10:04:32 +0100
Subject: [PATCH 19/58] Tidy comments

---
 .../xpack/ml/packageloader/action/ModelLoaderUtils.java | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java
index 5c04863d7a3ae..3815ea4ac79a4 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java
@@ -278,7 +278,7 @@ private static InputStream getHttpOrHttpsInputStream(URI uri, @Nullable RequestR
             case HTTP_NOT_FOUND:
                 throw new ResourceNotFoundException("{} not found", uri);
             case 416: // Range not satisfiable, for some reason not in the list of constants
-                throw new IllegalStateException("Invalid range [" + range.bytesRange() + "]");
+                throw new IllegalStateException("Invalid request range [" + range.bytesRange() + "]");
             default:
                 int responseCode = conn.getResponseCode();
                 throw new ElasticsearchStatusException(
@@ -327,8 +327,8 @@ static InputStream getFileInputStream(URI uri) {
      * whole number of chunks.
      * The first {@code numberOfStreams} ranges will be split evenly (in terms of
      * number of chunks not the byte size), the final range split
-     * is for the single final chunk and will be {@code chunkSizeBytes} in size.
-     * The separate range for the final chunk is because when streaming and
+     * is for the single final chunk and will be no more than {@code chunkSizeBytes}
+     * in size. The separate range for the final chunk is because when streaming and
      * uploading a large model definition, writing the last part has to be handled
      * as a special case.
      * @param sizeInBytes The total size of the stream
@@ -364,7 +364,7 @@ static List split(long sizeInBytes, int numberOfStreams, long chun
             startChunkIndex += numChunksExcludingFinal;
         }

-        // The final range is a single chunk and should not exceed the
+        // The final range is a single chunk, the end of which should not exceed sizeInBytes
         long rangeEnd = Math.min(sizeInBytes, startOffset + (baseChunksPerStream * chunkSizeBytes)) - 1;
         ranges.add(new RequestRange(startOffset, rangeEnd, startChunkIndex, 1));
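To make the split contract concrete, a small worked example (not from the patch): with a 100-byte stream, 10-byte chunks and three streams, the javadoc above implies ten chunks split as three ranges of three chunks each plus a final single-chunk range. In code, assuming the RequestRange accessors used elsewhere in these patches:

    // Hypothetical check of the even split described in the javadoc above.
    static void splitExample() {
        var ranges = ModelLoaderUtils.split(100, 3, 10); // 10 chunks of 10 bytes
        assert ranges.size() == 4;                       // 3 even ranges + the final single-chunk range
        assert ranges.get(0).rangeStart() == 0;          // chunks 0-2, bytes 0-29
        assert ranges.get(1).rangeStart() == 30;         // chunks 3-5, bytes 30-59
        assert ranges.get(2).rangeStart() == 60;         // chunks 6-8, bytes 60-89
        assert ranges.get(3).numParts() == 1;            // the final chunk, bytes 90-99
    }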
From d37cd3a126db4b636f813c6cbc21da887324fd6f Mon Sep 17 00:00:00 2001
From: David Kyle
Date: Tue, 10 Sep 2024 19:33:22 +0100
Subject: [PATCH 20/58] short threads

---
 .../MachineLearningPackageLoader.java         |  2 +-
 .../packageloader/action/ModelImporter.java   | 57 ++++++++++++-------
 2 files changed, 36 insertions(+), 23 deletions(-)

diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java
index efe09f5060958..4a55f7e3579f5 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java
@@ -97,7 +97,7 @@ public static FixedExecutorBuilder modelDownloadExecutor(Settings settings) {
             settings,
             MODEL_DOWNLOAD_THREADPOOL_NAME,
             ModelImporter.NUMBER_OF_STREAMS,
-            ModelImporter.NUMBER_OF_STREAMS * 2, // max items in queue
+            -1, // unbounded queue size
             "xpack.ml.model_download_thread_pool",
             EsExecutors.TaskTrackingConfig.DO_NOT_TRACK
         );
diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
index e2f30e035be1e..114544bc8e74e 100644
--- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
+++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java
@@ -90,7 +90,6 @@ private void doImportInternal(ActionListener finalListener
         );

         ModelLoaderUtils.VocabularyParts vocabularyParts = null;
-
         try {
             if (config.getVocabularyFile() != null) {
                 vocabularyParts = ModelLoaderUtils.loadVocabulary(
@@ -146,18 +145,21 @@ void downloadModelDefinition(
             // The final split is a single chunk
             for (int streamSplit = 0; streamSplit < downloaders.size() - 1; ++streamSplit) {
                 final var downloader = downloaders.get(streamSplit);
-                var doLast = countingListener.acquire();
-                executorService.execute(() -> downloadPartsInRange(size, totalParts, downloader, countingListener, doLast));
+                var rangeDownloadedListener = countingListener.acquire(); // acquire to keep the counting listener from closing
+                executorService.execute(
+                    () -> downloadPartInRange(size, totalParts, downloader, executorService, countingListener, rangeDownloadedListener)
+                );
             }
         }
     }

-    private void downloadPartsInRange(
+    private void downloadPartInRange(
         long size,
         int totalParts,
         ModelLoaderUtils.HttStreamChunker downloadChunker,
+        ExecutorService executorService,
         RefCountingListener countingListener,
-        ActionListener doLast
+        ActionListener rangeFullyDownloadedListener
     ) {
         assert ThreadPool.assertCurrentThreadPool(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME)
             : format(
@@ -166,29 +168,40 @@ private void downloadPartsInRange(
                 Thread.currentThread().getName()
             );

-        while (downloadChunker.hasNext()) {
-            if (countingListener.isFailing()) {
-                if (listenerIsClosed.compareAndSet(false, true)) {
-                    countingListener.close();
-                }
-                return;
+        if (countingListener.isFailing()) {
+            if (listenerIsClosed.compareAndSet(false, true)) {
+                countingListener.close();
             }
+            return;
+        }

-            try {
-                throwIfTaskCancelled();
-                var bytesAndIndex = downloadChunker.next();
-                task.setProgress(totalParts, progressCounter.getAndIncrement());
+        try {
+            throwIfTaskCancelled();
+            var bytesAndIndex = downloadChunker.next();
+            task.setProgress(totalParts, progressCounter.getAndIncrement());

-                indexPart(bytesAndIndex.partIndex(), totalParts, size, bytesAndIndex.bytes(), countingListener.acquire(ack -> {}));
-            } catch (Exception e) {
-                countingListener.acquire().onFailure(e);
-                if (listenerIsClosed.compareAndSet(false, true)) {
-                    countingListener.close();
+            indexPart(bytesAndIndex.partIndex(), totalParts, size, bytesAndIndex.bytes(), countingListener.acquire(ack -> {}));
+        } catch (Exception e) {
+            countingListener.acquire().onFailure(e);
+            if (listenerIsClosed.compareAndSet(false, true)) {
+                countingListener.close();
             }
         }

-        doLast.onResponse(null);
+        if (downloadChunker.hasNext()) {
+            executorService.execute(
+                () -> downloadPartInRange(
+                    size,
+                    totalParts,
+                    downloadChunker,
+                    executorService,
+                    countingListener,
+                    rangeFullyDownloadedListener
+                )
+            );
+        } else {
+            rangeFullyDownloadedListener.onResponse(null);
+        }
     }

     private void downloadFinalPart(
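The "short threads" change above swaps the long-running while loop for tasks that re-submit themselves to the executor after each chunk, so a pool thread is never pinned for a whole range (which is also why the queue becomes unbounded). A stand-alone sketch of that scheduling pattern, with plain JDK types standing in for the listener machinery (all names here are invented for the example):

    import java.util.Iterator;
    import java.util.List;
    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    class ResubmittingWorker {
        // Processes one element per task, then reschedules itself, so no thread
        // is held for the lifetime of the whole iteration.
        static void processNext(Iterator<String> work, ExecutorService executor, CompletableFuture<Void> done) {
            try {
                if (work.hasNext() == false) {
                    done.complete(null); // plays the role of rangeFullyDownloadedListener.onResponse(null)
                    return;
                }
                System.out.println("processing " + work.next());
            } catch (Exception e) {
                done.completeExceptionally(e);
                return;
            }
            executor.execute(() -> processNext(work, executor, done)); // yield the thread between items
        }

        public static void main(String[] args) {
            ExecutorService executor = Executors.newFixedThreadPool(2);
            CompletableFuture<Void> done = new CompletableFuture<>();
            processNext(List.of("a", "b", "c").iterator(), executor, done);
            done.join();
            executor.shutdown();
        }
    }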
From 73f0f6ba45c5a96e21aaf001f77b620abfc26687 Mon Sep 17 00:00:00 2001
From: Iraklis Psaroudakis
Date: Wed, 11 Sep 2024 11:31:30 +0200
Subject: [PATCH 21/58] Enable markSupported in SlicedInputStream (#112563)

This means openSlice must be able to be called for a previous slice even if
the previous slice has been closed. We disable the support for current
implementations to keep their old behavior just to be sure.

Relates ES-9248
---
 .../blobstore/SlicedInputStream.java          |  71 ++++++++++-
 .../recovery/SnapshotFilesProvider.java       |   5 +
 .../blobstore/BlobStoreRepository.java        |   5 +
 .../blobstore/SlicedInputStreamTests.java     | 117 ++++++++++++++++++
 .../input/MetadataCachingIndexInput.java      |  17 ++-
 5 files changed, 201 insertions(+), 14 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStream.java b/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStream.java
index 6ab1bb6413fa6..586b1bae4cc9a 100644
--- a/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStream.java
+++ b/server/src/main/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStream.java
@@ -21,10 +21,14 @@
  * close is called before.
  */
 public abstract class SlicedInputStream extends InputStream {
-    private int slice = 0;
+    private int nextSlice = 0;
     private InputStream currentStream;
+    private int currentSliceOffset = 0;
     private final int numSlices;
+    private boolean closed = false;
     private boolean initialized = false;
+    private int markedSlice = -1;
+    private int markedSliceOffset = -1;

     /**
      * Creates a new SlicedInputStream
@@ -36,18 +40,23 @@ protected SlicedInputStream(final int numSlices) {

     private InputStream nextStream() throws IOException {
         assert initialized == false || currentStream != null;
+        assert closed == false : "attempted to get next stream when closed";
         initialized = true;
         IOUtils.close(currentStream);
-        if (slice < numSlices) {
-            currentStream = openSlice(slice++);
+        if (nextSlice < numSlices) {
+            currentStream = openSlice(nextSlice++);
         } else {
             currentStream = null;
         }
+        currentSliceOffset = 0;
         return currentStream;
     }

     /**
      * Called for each logical slice given a zero based slice ordinal.
+     *
+     * Note that if {@link InputStream#markSupported()} is true (can be overridden to return false), the function may be called again to
+     * open a previous slice (which must have the same size as before). The returned InputStreams do not need to support mark/reset.
      */
     protected abstract InputStream openSlice(int slice) throws IOException;

@@ -69,6 +78,7 @@ public final int read() throws IOException {
             nextStream();
             return read();
         }
+        currentSliceOffset++;
         return read;
     }

@@ -83,14 +93,22 @@ public final int read(byte[] buffer, int offset, int length) throws IOException
             nextStream();
             return read(buffer, offset, length);
         }
+        currentSliceOffset += read;
         return read;
     }

     @Override
-    public final void close() throws IOException {
-        IOUtils.close(currentStream);
+    public void close() throws IOException {
+        closed = true;
         initialized = true;
+        currentSliceOffset = 0;
+        final InputStream stream = currentStream;
         currentStream = null;
+        IOUtils.close(stream);
+    }
+
+    public boolean isClosed() {
+        return closed;
     }

     @Override
@@ -99,4 +117,47 @@ public final int available() throws IOException {
         return stream == null ? 0 : stream.available();
     }

+    @Override
+    public boolean markSupported() {
+        return true;
+    }
+
+    @Override
+    public void mark(int readLimit) {
+        // We ignore readLimit since openSlice() can re-open previous InputStreams, and we can skip as many bytes as we'd like.
+        // According to JDK documentation, marking a closed InputStream should have no effect.
+        if (markSupported() && isClosed() == false && numSlices > 0) {
+            if (initialized) {
+                markedSlice = nextSlice - 1;
+                markedSliceOffset = currentSliceOffset;
+            } else {
+                markedSlice = 0;
+                markedSliceOffset = 0;
+            }
+        }
+    }
+
+    @Override
+    public void reset() throws IOException {
+        if (markSupported()) {
+            if (isClosed()) {
+                throw new IOException("reset called on a closed stream");
+            } else if (numSlices > 0) {
+                if (markedSlice < 0 || markedSliceOffset < 0) {
+                    throw new IOException("Mark has not been set");
+                }
+
+                // We do not call the SlicedInputStream's skipNBytes but call skipNBytes directly on the returned stream, to ensure that
+                // the skip is performed on the marked slice and no other slices are involved. This may help uncover any bugs.
+                nextSlice = markedSlice;
+                final InputStream stream = nextStream();
+                if (stream != null) {
+                    stream.skipNBytes(markedSliceOffset);
+                }
+                currentSliceOffset = markedSliceOffset;
+            }
+        } else {
+            throw new IOException("mark/reset not supported");
+        }
+    }
 }
diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/SnapshotFilesProvider.java b/server/src/main/java/org/elasticsearch/indices/recovery/SnapshotFilesProvider.java
index 1424ef160657b..0a1bf765bc12d 100644
--- a/server/src/main/java/org/elasticsearch/indices/recovery/SnapshotFilesProvider.java
+++ b/server/src/main/java/org/elasticsearch/indices/recovery/SnapshotFilesProvider.java
@@ -52,6 +52,11 @@ public InputStream getInputStreamForSnapshotFile(
                 protected InputStream openSlice(int slice) throws IOException {
                     return container.readBlob(OperationPurpose.SNAPSHOT_DATA, fileInfo.partName(slice));
                 }
+
+                @Override
+                public boolean markSupported() {
+                    return false;
+                }
             };
         }
         return blobStoreRepository.maybeRateLimitRestores(inputStream, rateLimiterListener::accept);
diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
index 104cb95018312..43af0a970857b 100644
--- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
+++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
@@ -3606,6 +3606,11 @@ protected InputStream openSlice(int slice) throws IOException {
                 ensureNotClosing(store);
                 return container.readBlob(OperationPurpose.SNAPSHOT_DATA, fileInfo.partName(slice));
             }
+
+            @Override
+            public boolean markSupported() {
+                return false;
+            }
         })) {
             final byte[] buffer = new byte[Math.toIntExact(Math.min(bufferSize, fileInfo.length()))];
             int length;
diff --git a/server/src/test/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStreamTests.java b/server/src/test/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStreamTests.java
index 00a657b7fdcf1..dce54b11b720c 100644
--- a/server/src/test/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStreamTests.java
+++ b/server/src/test/java/org/elasticsearch/index/snapshots/blobstore/SlicedInputStreamTests.java
@@ -16,6 +16,7 @@
 import java.io.FilterInputStream;
 import java.io.IOException;
 import java.io.InputStream;
+import java.util.ArrayList;
 import java.util.Random;

 import static org.hamcrest.Matchers.equalTo;
@@ -75,7 +76,123 @@ protected InputStream openSlice(int slice) throws IOException {
         for (int i = 0; i < streams.length; i++) {
             assertTrue(streams[i].closed);
         }
     }
+
+    public void testRandomMarkReset() throws IOException {
+        final int slices = randomIntBetween(1, 20);
+        final var bytes = randomByteArrayOfLength(randomIntBetween(1000, 10000));
+        final int sliceSize = bytes.length / slices;
+
+        final var streamsOpened = new ArrayList();
+        SlicedInputStream input = new SlicedInputStream(slices) {
+            @Override
+            protected InputStream openSlice(int slice) throws IOException {
+                final int sliceOffset = slice * sliceSize;
+                final int length = slice == slices - 1 ? bytes.length - sliceOffset : sliceSize;
+                final var stream = new CheckClosedInputStream(new ByteArrayInputStream(bytes, sliceOffset, length));
+                streamsOpened.add(stream);
+                return stream;
+            }
+        };
+
+        // Read up to a random point
+        final int mark = randomIntBetween(0, bytes.length);
+        if (mark > 0) {
+            final var bytesReadUntilMark = new byte[mark];
+            input.readNBytes(bytesReadUntilMark, 0, mark);
+            final var expectedBytesUntilMark = new ByteArrayInputStream(bytes, 0, mark).readAllBytes();
+            assertArrayEquals(expectedBytesUntilMark, bytesReadUntilMark);
+        }
+
+        // Reset should throw since there is no mark
+        expectThrows(IOException.class, input::reset);
+
+        // Mark
+        input.mark(randomNonNegativeInt());
+
+        // Read up to another random point
+        final int moreBytes = randomIntBetween(0, bytes.length - mark);
+        if (moreBytes > 0) {
+            final var moreBytesRead = new byte[moreBytes];
+            input.readNBytes(moreBytesRead, 0, moreBytes);
+            final var expectedMoreBytes = new ByteArrayInputStream(bytes, mark, moreBytes).readAllBytes();
+            assertArrayEquals(expectedMoreBytes, moreBytesRead);
+        }
+
+        // Reset
+        input.reset();
+
+        // Read all remaining bytes, which should be the bytes from mark up to the end
+        final int remainingBytes = bytes.length - mark;
+        if (remainingBytes > 0) {
+            final var remainingBytesRead = new byte[remainingBytes];
+            input.readNBytes(remainingBytesRead, 0, remainingBytes);
+            final var expectedRemainingBytes = new ByteArrayInputStream(bytes, mark, remainingBytes).readAllBytes();
+            assertArrayEquals(expectedRemainingBytes, remainingBytesRead);
+        }
+
+        // Confirm we reached the end and close the stream
+        assertThat(input.read(), equalTo(-1));
+        input.close();
+        streamsOpened.forEach(stream -> assertTrue(stream.closed));
+    }
+
+    public void testMarkResetClosedStream() throws IOException {
+        final int slices = randomIntBetween(1, 20);
+        SlicedInputStream input = new SlicedInputStream(slices) {
+            @Override
+            protected InputStream openSlice(int slice) throws IOException {
+                return new ByteArrayInputStream(new byte[] { 0 }, 0, 1);
+            }
+        };
+
+        input.skip(randomIntBetween(1, slices));
+        input.mark(randomNonNegativeInt());
+        input.close();
+        // SlicedInputStream supports reading -1 after close without throwing
+        assertThat(input.read(), equalTo(-1));
+        expectThrows(IOException.class, input::reset);
+        assertThat(input.read(), equalTo(-1));
+        input.mark(randomNonNegativeInt());
+        assertThat(input.read(), equalTo(-1));
+    }
+
+    public void testMarkResetUnsupportedStream() throws IOException {
+        final int slices = randomIntBetween(1, 20);
+        SlicedInputStream input = new SlicedInputStream(slices) {
+            @Override
+            protected InputStream openSlice(int slice) throws IOException {
+                return new ByteArrayInputStream(new byte[] { 0 }, 0, 1);
+            }
+
+            @Override
+            public boolean markSupported() {
+                return false;
+            }
+        };
+        input.mark(randomNonNegativeInt());
+        expectThrows(IOException.class, input::reset);
+        input.close();
+    }
+
+    public void testMarkResetZeroSlices() throws IOException {
+        SlicedInputStream input = new SlicedInputStream(0) {
+            @Override
+            protected InputStream openSlice(int slice) throws IOException {
+                throw new AssertionError("should not be called");
AssertionError("should not be called"); + } + }; + + if (randomBoolean()) { + // randomly initialize the stream + assertThat(input.read(), equalTo(-1)); + } + + input.mark(randomNonNegativeInt()); + input.reset(); + assertThat(input.read(), equalTo(-1)); + input.close(); } private int readFully(InputStream stream, byte[] buffer) throws IOException { diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java index 8c978c3445526..94ba06a00cc4e 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/MetadataCachingIndexInput.java @@ -538,14 +538,6 @@ protected InputStream openInputStreamFromBlobStore(final long position, final lo private SlicedInputStream openInputStreamMultipleParts(long position, long readLength) { final int startPart = getPartNumberForPosition(position); final int endPart = getPartNumberForPosition(position + readLength - 1); - - for (int currentPart = startPart; currentPart <= endPart; currentPart++) { - final long startInPart = (currentPart == startPart) ? getRelativePositionInPart(position) : 0L; - final long endInPart; - endInPart = currentPart == endPart ? getRelativePositionInPart(position + readLength - 1) + 1 : fileInfo.partBytes(currentPart); - stats.addBlobStoreBytesRequested(endInPart - startInPart); - } - return new SlicedInputStream(endPart - startPart + 1) { @Override protected InputStream openSlice(int slice) throws IOException { @@ -555,8 +547,15 @@ protected InputStream openSlice(int slice) throws IOException { endInPart = currentPart == endPart ? getRelativePositionInPart(position + readLength - 1) + 1 : fileInfo.partBytes(currentPart); + final long length = endInPart - startInPart; + stats.addBlobStoreBytesRequested(length); return directory.blobContainer() - .readBlob(OperationPurpose.SNAPSHOT_DATA, fileInfo.partName(currentPart), startInPart, endInPart - startInPart); + .readBlob(OperationPurpose.SNAPSHOT_DATA, fileInfo.partName(currentPart), startInPart, length); + } + + @Override + public boolean markSupported() { + return false; } }; } From 830c26feebc05433368bada11df3ae14b214e2cb Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 11 Sep 2024 12:52:08 +0200 Subject: [PATCH 22/58] Make two SubReaderWrapper implementations singletons (#112596) random find: No state in either of these, no need to recreate them for every directory. Saves a few cycles and makes it more obvious that there's no state in these. 
From 830c26feebc05433368bada11df3ae14b214e2cb Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Wed, 11 Sep 2024 12:52:08 +0200
Subject: [PATCH 22/58] Make two SubReaderWrapper implementations singletons
 (#112596)

random find: No state in either of these, no need to recreate them for every
directory. Saves a few cycles and makes it more obvious that there's no state
in these.

---
 .../elasticsearch/common/lucene/Lucene.java   | 36 ++++++++++---------
 .../elasticsearch/index/shard/IndexShard.java | 21 +++++++----
 2 files changed, 34 insertions(+), 23 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java
index acdc3e32ea31a..95552fa508f72 100644
--- a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java
+++ b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java
@@ -884,24 +884,26 @@ protected StoredFieldsReader doGetSequentialStoredFieldsReader(StoredFieldsReade
         }
     }

-    DirectoryReaderWithAllLiveDocs(DirectoryReader in) throws IOException {
-        super(in, new SubReaderWrapper() {
-            @Override
-            public LeafReader wrap(LeafReader leaf) {
-                final SegmentReader segmentReader = segmentReader(leaf);
-                final Bits hardLiveDocs = segmentReader.getHardLiveDocs();
-                if (hardLiveDocs == null) {
-                    return new LeafReaderWithLiveDocs(leaf, null, leaf.maxDoc());
-                }
-                // Once soft-deletes is enabled, we no longer hard-update or hard-delete documents directly.
-                // Two scenarios that we have hard-deletes: (1) from old segments where soft-deletes was disabled,
-                // (2) when IndexWriter hits non-aborted exceptions. These two cases, IW flushes SegmentInfos
-                // before exposing the hard-deletes, thus we can use the hard-delete count of SegmentInfos.
-                final int numDocs = segmentReader.maxDoc() - segmentReader.getSegmentInfo().getDelCount();
-                assert numDocs == popCount(hardLiveDocs) : numDocs + " != " + popCount(hardLiveDocs);
-                return new LeafReaderWithLiveDocs(segmentReader, hardLiveDocs, numDocs);
+    private static final SubReaderWrapper ALL_LIVE_DOCS_SUB_READER_WRAPPER = new SubReaderWrapper() {
+        @Override
+        public LeafReader wrap(LeafReader leaf) {
+            final SegmentReader segmentReader = segmentReader(leaf);
+            final Bits hardLiveDocs = segmentReader.getHardLiveDocs();
+            if (hardLiveDocs == null) {
+                return new LeafReaderWithLiveDocs(leaf, null, leaf.maxDoc());
             }
-        });
+            // Once soft-deletes is enabled, we no longer hard-update or hard-delete documents directly.
+            // Two scenarios that we have hard-deletes: (1) from old segments where soft-deletes was disabled,
+            // (2) when IndexWriter hits non-aborted exceptions. These two cases, IW flushes SegmentInfos
+            // before exposing the hard-deletes, thus we can use the hard-delete count of SegmentInfos.
+            final int numDocs = segmentReader.maxDoc() - segmentReader.getSegmentInfo().getDelCount();
+            assert numDocs == popCount(hardLiveDocs) : numDocs + " != " + popCount(hardLiveDocs);
+            return new LeafReaderWithLiveDocs(segmentReader, hardLiveDocs, numDocs);
+        }
+    };
+
+    DirectoryReaderWithAllLiveDocs(DirectoryReader in) throws IOException {
+        super(in, ALL_LIVE_DOCS_SUB_READER_WRAPPER);
     }

     @Override
diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java
index b7d1beb4d1e06..8f1ae42a7475c 100644
--- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java
+++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java
@@ -1700,13 +1700,22 @@ public void setGlobalCheckpointIfUnpromotable(long globalCheckpoint) {

     private static final class NonClosingReaderWrapper extends FilterDirectoryReader {

+        private static final LeafReader[] EMPTY_LEAF_READERS = new LeafReader[0];
+
+        private static final FilterDirectoryReader.SubReaderWrapper SUB_READER_WRAPPER = new SubReaderWrapper() {
+            @Override
+            public LeafReader wrap(LeafReader reader) {
+                return reader;
+            }
+
+            @Override
+            protected LeafReader[] wrap(List readers) {
+                return readers.toArray(EMPTY_LEAF_READERS);
+            }
+        };
+
         private NonClosingReaderWrapper(DirectoryReader in) throws IOException {
-            super(in, new SubReaderWrapper() {
-                @Override
-                public LeafReader wrap(LeafReader reader) {
-                    return reader;
-                }
-            });
+            super(in, SUB_READER_WRAPPER);
         }

         @Override
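The refactor in this commit is a general pattern: an anonymous class that captures no state can be hoisted into a shared static final instance, which both saves allocations and documents its statelessness. A generic before/after sketch (names invented for the example):

    import java.util.function.UnaryOperator;

    class Wrappers {
        // Before: a fresh (but stateless) wrapper object is allocated on every call.
        static UnaryOperator<String> freshWrapper() {
            return new UnaryOperator<String>() {
                @Override
                public String apply(String s) {
                    return s;
                }
            };
        }

        // After: one shared instance; safe to reuse across threads precisely because
        // the anonymous class captures nothing and holds no mutable state.
        private static final UnaryOperator<String> IDENTITY_WRAPPER = new UnaryOperator<String>() {
            @Override
            public String apply(String s) {
                return s;
            }
        };

        static UnaryOperator<String> sharedWrapper() {
            return IDENTITY_WRAPPER;
        }
    }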
From 02a97505f337c5e5d041385f9dc9e3d325beb586 Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Wed, 11 Sep 2024 13:04:34 +0200
Subject: [PATCH 23/58] Two speedups to IndexNameExpressionResolver (#112486)

Two obvious speedups to the IndexNameExpressionResolver where we can defer an
expensive lookup from either the indices lookup or the thread context to if
and when we actually need it.

---
 .../metadata/IndexNameExpressionResolver.java | 19 ++++++++-----------
 .../elasticsearch/indices/SystemIndices.java  |  3 +--
 2 files changed, 9 insertions(+), 13 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
index 459c6c6ec733e..b945fe7e510f6 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
@@ -567,17 +567,14 @@ private static boolean shouldTrackConcreteIndex(Context context, IndicesOptions
             // Exclude this one as it's a net-new system index, and we explicitly don't want those.
             return false;
         }
-        if (DataStream.isFailureStoreFeatureFlagEnabled()) {
-            IndexAbstraction indexAbstraction = context.getState().metadata().getIndicesLookup().get(index.getName());
-            if (context.options.allowFailureIndices() == false) {
-                DataStream parentDataStream = indexAbstraction.getParentDataStream();
-                if (parentDataStream != null && parentDataStream.isFailureStoreEnabled()) {
-                    if (parentDataStream.isFailureStoreIndex(index.getName())) {
-                        if (options.ignoreUnavailable()) {
-                            return false;
-                        } else {
-                            throw new FailureIndexNotSupportedException(index);
-                        }
+        if (DataStream.isFailureStoreFeatureFlagEnabled() && context.options.allowFailureIndices() == false) {
+            DataStream parentDataStream = context.getState().metadata().getIndicesLookup().get(index.getName()).getParentDataStream();
+            if (parentDataStream != null && parentDataStream.isFailureStoreEnabled()) {
+                if (parentDataStream.isFailureStoreIndex(index.getName())) {
+                    if (options.ignoreUnavailable()) {
+                        return false;
+                    } else {
+                        throw new FailureIndexNotSupportedException(index);
                     }
                 }
             }
diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java
index 3261ac83a7e67..bafbd57a607a5 100644
--- a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java
+++ b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java
@@ -562,11 +562,10 @@ public static SystemIndexAccessLevel getSystemIndexAccessLevel(ThreadContext thr
         // This method intentionally cannot return BACKWARDS_COMPATIBLE_ONLY - that access level should only be used manually
         // in known special cases.
         final String headerValue = threadContext.getHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY);
-        final String productHeaderValue = threadContext.getHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY);

         final boolean allowed = Booleans.parseBoolean(headerValue, true);
         if (allowed) {
-            if (productHeaderValue != null) {
+            if (threadContext.getHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY) != null) {
                 return SystemIndexAccessLevel.RESTRICTED;
             } else {
                 return SystemIndexAccessLevel.ALL;
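Both changes in this commit are instances of the same micro-optimisation: evaluate the cheap condition first and defer the expensive lookup until its result can actually matter. A tiny sketch of the before/after shape (hypothetical names, plain JDK types):

    import java.util.Map;

    class DeferredLookup {
        // Before: the map lookup is paid on every call, even when the flag
        // already rules the branch out.
        static boolean eagerCheck(boolean enabled, Map<String, String> lookup, String key) {
            String value = lookup.get(key);
            return enabled && value != null;
        }

        // After: short-circuit on the flag first; the lookup is skipped entirely
        // when enabled == false.
        static boolean lazyCheck(boolean enabled, Map<String, String> lookup, String key) {
            return enabled && lookup.get(key) != null;
        }
    }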
From 7f66489e399b8aaccace550d0cc4d718ea1cd42d Mon Sep 17 00:00:00 2001
From: Christoph Büscher
Date: Wed, 11 Sep 2024 14:00:15 +0200
Subject: [PATCH 24/58] Fix failing LangMustacheClientYamlTestSuiteIT
 yamlRestTestV7CompatTest (#112683)

The error message changed slightly to include the failure location. While this
was added to the main test yml file, the yamlRestTestV7CompatTest resources
seem to get copied from v7 files and we then modify them if we expect any
changes.

Closes #112580
---
 modules/lang-mustache/build.gradle | 1 +
 muted-tests.yml                    | 6 +++---
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/modules/lang-mustache/build.gradle b/modules/lang-mustache/build.gradle
index c36275699e21f..7059165af2d9f 100644
--- a/modules/lang-mustache/build.gradle
+++ b/modules/lang-mustache/build.gradle
@@ -29,4 +29,5 @@ restResources {
 tasks.named("yamlRestTestV7CompatTransform").configure {task ->
   task.addAllowedWarningRegex("\\[types removal\\].*")
   task.replaceValueInMatch("responses.1.error.root_cause.0.type", "x_content_e_o_f_exception", "Multi-search template with errors")
+  task.replaceValueInMatch("responses.1.error.root_cause.0.reason", "/\\[1:22\\].Unexpected.end.of.file/", "Multi-search template with errors")
 }
diff --git a/muted-tests.yml b/muted-tests.yml
index 63aaae899790d..be212c1ecf10c 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -154,9 +154,9 @@ tests:
   issue: https://github.com/elastic/elasticsearch/issues/112471
 - class: org.elasticsearch.ingest.geoip.IngestGeoIpClientYamlTestSuiteIT
   issue: https://github.com/elastic/elasticsearch/issues/111497
-- class: org.elasticsearch.script.mustache.LangMustacheClientYamlTestSuiteIT
-  method: test {yaml=lang_mustache/50_multi_search_template/Multi-search template with errors}
-  issue: https://github.com/elastic/elasticsearch/issues/112580
+- class: org.elasticsearch.smoketest.SmokeTestIngestWithAllDepsClientYamlTestSuiteIT
+  method: test {yaml=ingest/80_ingest_simulate/Test ingest simulate with reroute and mapping validation from templates}
+  issue: https://github.com/elastic/elasticsearch/issues/112575
 - class: org.elasticsearch.xpack.security.authc.kerberos.SimpleKdcLdapServerTests
   method: testClientServiceMutualAuthentication
   issue: https://github.com/elastic/elasticsearch/issues/112529

From ae94a4038287a51a5a56bc93f40f44b9052565fc Mon Sep 17 00:00:00 2001
From: David Turner
Date: Wed, 11 Sep 2024 13:18:03 +0100
Subject: [PATCH 25/58] Fix trappy timeouts in downsample action (#112734)

Relates #107984
---
 .../lifecycle/DataStreamLifecycleService.java        |  8 +++++++-
 .../action/downsample/DownsampleAction.java          |  7 ++++---
 .../elasticsearch/xpack/core/ilm/DownsampleStep.java |  9 +++++++--
 .../downsample/DownsampleTransportFailureIT.java     |  2 ++
 .../xpack/downsample/RestDownsampleAction.java       |  2 ++
 .../downsample/DownsampleActionSingleNodeTests.java  | 12 +++++++++---
 .../xpack/downsample/DownsampleDataStreamTests.java  |  1 +
 7 files changed, 32 insertions(+), 9 deletions(-)

diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java
index 99d4f8bb7cd28..d8f8ae9d080a7 100644
--- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java
+++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java
@@ -561,7 +561,13 @@ private Set waitForInProgressOrTriggerDownsampling(
      * Issues a request to downsample the source index to the downsample index for the specified round.
     */
    private void downsampleIndexOnce(DataStreamLifecycle.Downsampling.Round round, String sourceIndex, String downsampleIndexName) {
-        DownsampleAction.Request request = new DownsampleAction.Request(sourceIndex, downsampleIndexName, null, round.config());
+        DownsampleAction.Request request = new DownsampleAction.Request(
+            TimeValue.THIRTY_SECONDS /* TODO should this be longer/configurable? */,
+            sourceIndex,
+            downsampleIndexName,
+            null,
+            round.config()
+        );
         transportActionsDeduplicator.executeOnce(
             request,
             new ErrorRecordingActionListener(
diff --git a/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java b/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java
index 7d2b1be79731e..8ccc190a0444b 100644
--- a/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java
+++ b/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java
@@ -45,20 +45,21 @@ public static class Request extends MasterNodeRequest implements Indice
         private DownsampleConfig downsampleConfig;

         public Request(
+            TimeValue masterNodeTimeout,
             final String sourceIndex,
             final String targetIndex,
             final TimeValue waitTimeout,
             final DownsampleConfig downsampleConfig
         ) {
-            super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT);
+            super(masterNodeTimeout);
             this.sourceIndex = sourceIndex;
             this.targetIndex = targetIndex;
             this.waitTimeout = waitTimeout == null ? DEFAULT_WAIT_TIMEOUT : waitTimeout;
             this.downsampleConfig = downsampleConfig;
         }

-        public Request() {
-            super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT);
+        public Request(TimeValue masterNodeTimeout) {
+            super(masterNodeTimeout);
         }

         public Request(StreamInput in) throws IOException {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleStep.java
index bfbc32e11e93d..b179195e87770 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleStep.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleStep.java
@@ -90,8 +90,13 @@ public void performAction(

     void performDownsampleIndex(String indexName, String downsampleIndexName, ActionListener listener) {
         DownsampleConfig config = new DownsampleConfig(fixedInterval);
-        DownsampleAction.Request request = new DownsampleAction.Request(indexName, downsampleIndexName, waitTimeout, config)
-            .masterNodeTimeout(TimeValue.MAX_VALUE);
+        DownsampleAction.Request request = new DownsampleAction.Request(
+            TimeValue.MAX_VALUE,
+            indexName,
+            downsampleIndexName,
+            waitTimeout,
+            config
+        );
         // Currently, DownsampleAction always acknowledges action was complete when no exceptions are thrown.
         getClient().execute(DownsampleAction.INSTANCE, request, listener.delegateFailureAndWrap((l, response) -> l.onResponse(null)));
     }
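The fix follows one pattern throughout: the master-node timeout becomes a required constructor argument, so each call site must pick a value (TimeValue.THIRTY_SECONDS, TimeValue.MAX_VALUE, TEST_REQUEST_TIMEOUT, or the REST parameter) rather than silently inheriting a default. A minimal sketch of that API shape, with invented names and java.time.Duration standing in for TimeValue:

    import java.time.Duration;

    // Illustrative only: there is no zero-argument constructor, so a caller cannot
    // forget to choose a timeout.
    record DownsampleRequestSketch(Duration masterTimeout, String sourceIndex, String targetIndex) {
        public static void main(String[] args) {
            var request = new DownsampleRequestSketch(Duration.ofSeconds(30), "metrics-raw", "metrics-1h");
            System.out.println("master timeout: " + request.masterTimeout());
        }
    }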
diff --git a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DownsampleTransportFailureIT.java b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DownsampleTransportFailureIT.java
index d94d609cf3470..365f31f8e5fe1 100644
--- a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DownsampleTransportFailureIT.java
+++ b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DownsampleTransportFailureIT.java
@@ -267,6 +267,7 @@ public void testNoDisruption() {
         // GIVEN
         final DownsampleAction.Request downsampleRequest = new DownsampleAction.Request(
+            TEST_REQUEST_TIMEOUT,
             SOURCE_INDEX_NAME,
             TARGET_INDEX_NAME,
             WAIT_TIMEOUT,
@@ -294,6 +295,7 @@ public void testDownsampleActionExceptionDisruption() {
         // GIVEN
         final MockTransportService coordinator = MockTransportService.getInstance(testCluster.coordinator);
         final DownsampleAction.Request downsampleRequest = new DownsampleAction.Request(
+            TEST_REQUEST_TIMEOUT,
             SOURCE_INDEX_NAME,
             TARGET_INDEX_NAME,
             WAIT_TIMEOUT,
diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/RestDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/RestDownsampleAction.java
index 8324265c3a786..eb8dfe72850a2 100644
--- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/RestDownsampleAction.java
+++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/RestDownsampleAction.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.RestUtils;
 import org.elasticsearch.rest.Scope;
 import org.elasticsearch.rest.ServerlessScope;
 import org.elasticsearch.rest.action.RestToXContentListener;
@@ -40,6 +41,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient
             config = DownsampleConfig.fromXContent(parser);
         }
         DownsampleAction.Request request = new DownsampleAction.Request(
+            RestUtils.getMasterNodeTimeout(restRequest),
             sourceIndex,
             targetIndex,
             TimeValue.parseTimeValue(timeout, null, "wait_timeout"),
diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java
index 6a615d648a850..812b48ee4cae5 100644
--- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java
+++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java
@@ -594,7 +594,7 @@ public void onFailure(Exception e) {
         };
         client().execute(
             DownsampleAction.INSTANCE,
-            new DownsampleAction.Request(sourceIndex, downsampleIndex, TIMEOUT, config),
+            new DownsampleAction.Request(TEST_REQUEST_TIMEOUT, sourceIndex, downsampleIndex, TIMEOUT, config),
             downsampleListener
         );
         assertBusy(() -> {
@@ -607,7 +607,10 @@

         assertBusy(() -> {
             try {
-                client().execute(DownsampleAction.INSTANCE, new DownsampleAction.Request(sourceIndex, downsampleIndex, TIMEOUT, config));
+                client().execute(
+                    DownsampleAction.INSTANCE,
+                    new DownsampleAction.Request(TEST_REQUEST_TIMEOUT, sourceIndex, downsampleIndex, TIMEOUT, config)
sourceIndex, downsampleIndex, TIMEOUT, config) + ); } catch (ElasticsearchException e) { fail("transient failure due to overlapping downsample operations"); } @@ -1145,7 +1148,10 @@ private void prepareSourceIndex(final String sourceIndex, boolean blockWrite) { private void downsample(String sourceIndex, String downsampleIndex, DownsampleConfig config) { assertAcked( - client().execute(DownsampleAction.INSTANCE, new DownsampleAction.Request(sourceIndex, downsampleIndex, TIMEOUT, config)) + client().execute( + DownsampleAction.INSTANCE, + new DownsampleAction.Request(TEST_REQUEST_TIMEOUT, sourceIndex, downsampleIndex, TIMEOUT, config) + ) ); } diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleDataStreamTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleDataStreamTests.java index 834a1e887caa7..c705b3c6a98d3 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleDataStreamTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleDataStreamTests.java @@ -98,6 +98,7 @@ public void testDataStreamDownsample() throws ExecutionException, InterruptedExc // WHEN (simulate downsampling as done by an ILM action) final String downsampleTargetIndex = DataStream.BACKING_INDEX_PREFIX + dataStreamName + "-downsample-1h"; final DownsampleAction.Request downsampleRequest = new DownsampleAction.Request( + TEST_REQUEST_TIMEOUT, rolloverResponse.getOldIndex(), downsampleTargetIndex, TIMEOUT, From 06a361a724acfd9695379709feaad1bd977335b3 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 11 Sep 2024 22:47:17 +1000 Subject: [PATCH 26/58] Mute org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT test {yaml=reference/cluster/stats/line_1450} #112732 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index be212c1ecf10c..ac35776db665f 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -205,6 +205,9 @@ tests: - class: org.elasticsearch.xpack.ml.integration.MlJobIT method: testGetJob_GivenNoSuchJob issue: https://github.com/elastic/elasticsearch/issues/112730 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + method: test {yaml=reference/cluster/stats/line_1450} + issue: https://github.com/elastic/elasticsearch/issues/112732 # Examples: # From 802ee008eb5c2ae7730c1274b28c9512db25c93d Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 11 Sep 2024 14:17:59 +0100 Subject: [PATCH 27/58] Introduce repository integrity verification API (#112348) Adds an API which scans all the metadata (and optionally the raw data) in a snapshot repository to look for corruptions or other inconsistencies. 
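For illustration (the repository name here is just a placeholder), the new endpoint is invoked as `POST /_snapshot/my_repository/_verify_integrity`; the response contains a log of the verification progress followed by a summary of the results.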
Closes https://github.com/elastic/elasticsearch/issues/52622 Closes ES-8560 --- docs/changelog/112348.yaml | 6 + .../apis/snapshot-restore-apis.asciidoc | 1 + .../apis/verify-repo-integrity-api.asciidoc | 232 +++++ .../register-repository.asciidoc | 4 +- .../snapshot.repository_verify_integrity.json | 65 ++ .../repositories/RepositoryData.java | 7 + .../qa/native-multi-node-tests/build.gradle | 1 + .../xpack/security/operator/Constants.java | 1 + .../test/20_verify_integrity.yml | 39 + .../RepositoryVerifyIntegrityIT.java | 806 +++++++++++++++ .../src/main/java/module-info.java | 24 + .../testkit/SnapshotRepositoryTestKit.java | 25 +- .../SnapshotRepositoryTestKitFeatures.java | 22 + .../ActiveRepositoryVerifyIntegrityTasks.java | 56 ++ .../testkit/integrity/IndexDescription.java | 56 ++ .../RepositoryIntegrityVerifier.java | 949 ++++++++++++++++++ .../RepositoryVerifyIntegrityParams.java | 135 +++ .../RepositoryVerifyIntegrityResponse.java | 47 + ...epositoryVerifyIntegrityResponseChunk.java | 355 +++++++ ...positoryVerifyIntegrityResponseStream.java | 151 +++ .../RepositoryVerifyIntegrityTask.java | 133 +++ .../RestRepositoryVerifyIntegrityAction.java | 49 + ...nsportRepositoryVerifyIntegrityAction.java | 158 +++ ...toryVerifyIntegrityCoordinationAction.java | 186 ++++ ...oryVerifyIntegrityResponseChunkAction.java | 93 ++ ...lasticsearch.features.FeatureSpecification | 8 + 26 files changed, 3606 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/112348.yaml create mode 100644 docs/reference/snapshot-restore/apis/verify-repo-integrity-api.asciidoc create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.repository_verify_integrity.json create mode 100644 x-pack/plugin/snapshot-repo-test-kit/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/20_verify_integrity.yml create mode 100644 x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityIT.java create mode 100644 x-pack/plugin/snapshot-repo-test-kit/src/main/java/module-info.java create mode 100644 x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKitFeatures.java create mode 100644 x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/ActiveRepositoryVerifyIntegrityTasks.java create mode 100644 x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/IndexDescription.java create mode 100644 x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryIntegrityVerifier.java create mode 100644 x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityParams.java create mode 100644 x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityResponse.java create mode 100644 x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityResponseChunk.java create mode 100644 x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityResponseStream.java create mode 100644 
x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityTask.java create mode 100644 x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RestRepositoryVerifyIntegrityAction.java create mode 100644 x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/TransportRepositoryVerifyIntegrityAction.java create mode 100644 x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/TransportRepositoryVerifyIntegrityCoordinationAction.java create mode 100644 x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/TransportRepositoryVerifyIntegrityResponseChunkAction.java create mode 100644 x-pack/plugin/snapshot-repo-test-kit/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification diff --git a/docs/changelog/112348.yaml b/docs/changelog/112348.yaml new file mode 100644 index 0000000000000..84110a7cd4f1b --- /dev/null +++ b/docs/changelog/112348.yaml @@ -0,0 +1,6 @@ +pr: 112348 +summary: Introduce repository integrity verification API +area: Snapshot/Restore +type: enhancement +issues: + - 52622 diff --git a/docs/reference/snapshot-restore/apis/snapshot-restore-apis.asciidoc b/docs/reference/snapshot-restore/apis/snapshot-restore-apis.asciidoc index 6cdf65ba54e7e..b8bb6a2cd7d13 100644 --- a/docs/reference/snapshot-restore/apis/snapshot-restore-apis.asciidoc +++ b/docs/reference/snapshot-restore/apis/snapshot-restore-apis.asciidoc @@ -28,6 +28,7 @@ For more information, see <>. include::put-repo-api.asciidoc[] include::verify-repo-api.asciidoc[] include::repo-analysis-api.asciidoc[] +include::verify-repo-integrity-api.asciidoc[] include::get-repo-api.asciidoc[] include::delete-repo-api.asciidoc[] include::clean-up-repo-api.asciidoc[] diff --git a/docs/reference/snapshot-restore/apis/verify-repo-integrity-api.asciidoc b/docs/reference/snapshot-restore/apis/verify-repo-integrity-api.asciidoc new file mode 100644 index 0000000000000..99ae126b401f5 --- /dev/null +++ b/docs/reference/snapshot-restore/apis/verify-repo-integrity-api.asciidoc @@ -0,0 +1,232 @@ +[role="xpack"] +[[verify-repo-integrity-api]] +=== Verify repository integrity API +++++ +Verify repository integrity +++++ + +Verifies the integrity of the contents of a snapshot repository. + +//// +[source,console] +---- +PUT /_snapshot/my_repository +{ + "type": "fs", + "settings": { + "location": "my_backup_location" + } +} +---- +// TESTSETUP +//// + +[source,console] +---- +POST /_snapshot/my_repository/_verify_integrity +---- + +[[verify-repo-integrity-api-request]] +==== {api-request-title} + +`POST /_snapshot/<repository>/_verify_integrity` + +[[verify-repo-integrity-api-prereqs]] +==== {api-prereq-title} + +* If the {es} {security-features} are enabled, you must have the `manage` +<> to use this API. For more +information, see <>. + +[[verify-repo-integrity-api-desc]] +==== {api-description-title} + +This API allows you to perform a comprehensive check of the contents of a +repository, looking for any anomalies in its data or metadata which might +prevent you from restoring snapshots from the repository or which might cause +future snapshot create or delete operations to fail.
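+ +For example, the following request (an illustrative sketch; the parameter values are arbitrary) requests a more thorough, and potentially much slower, verification that also checks the checksum of every data blob while limiting the read rate to `20mb` per second: + +[source,console] +---- +POST /_snapshot/my_repository/_verify_integrity?verify_blob_contents=true&max_bytes_per_sec=20mb +----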
+ +If you suspect the integrity of the contents of one of your snapshot +repositories, cease all write activity to this repository immediately, set its +`read_only` option to `true`, and use this API to verify its integrity. Until +you do so: + +* It may not be possible to <> from this repository. + +* <> may report errors when searched, or may have + unassigned shards. + +* <> into this repository may fail, + or may appear to succeed, having created a snapshot which cannot be restored. + +* <> from this repository may fail, or + may appear to succeed, leaving the underlying data on disk. + +* Continuing to write to the repository while it is in an invalid state may + cause additional damage to its contents. + +If the <> API finds any problems with the integrity +of the contents of your repository, {es} will not be able to repair the damage. +The only way to bring the repository back into a fully working state after its +contents have been damaged is by restoring its contents from a +<> which was taken before the +damage occurred. You must also identify what caused the damage and take action +to prevent it from happening again. + +If you cannot restore a repository backup, +<> and use this for +all future snapshot operations. In some cases it may be possible to recover +some of the contents of a damaged repository, either by +<> as many of its snapshots as needed and +<> of the restored data, or by +using the <> API to copy data from any <> +mounted from the damaged repository. + +Avoid all operations which write to the repository while the +<> API is running. If something changes the +repository contents while an integrity verification is running then {es} may +incorrectly report having detected some anomalies in its contents due to the +concurrent writes. It may also incorrectly fail to report some anomalies that +the concurrent writes prevented it from detecting. + +NOTE: This API is intended for exploratory use by humans. You should expect the +request parameters and the response format to vary in future versions. + +NOTE: This API may not work correctly in a mixed-version cluster. + +[[verify-repo-integrity-api-path-params]] +==== {api-path-parms-title} + +`<repository>`:: +(Required, string) +Name of the snapshot repository whose integrity to verify. + +[[verify-repo-integrity-api-query-params]] +==== {api-query-parms-title} + +The default values for the parameters of this API are designed to limit the +impact of the integrity verification on other activities in your cluster. For +instance, by default it will only use at most half of the `snapshot_meta` +threads to verify the integrity of each snapshot, allowing other snapshot +operations to use the other half of this thread pool. + +If you modify these parameters to speed up the verification process, you risk +disrupting other snapshot-related operations in your cluster. For large +repositories, consider setting up a separate single-node {es} cluster just for +running the integrity verification API. + +`snapshot_verification_concurrency`:: +(Optional, integer) Specifies the number of snapshots to verify concurrently. +Defaults to `0` which means to use at most half of the `snapshot_meta` thread +pool at once. + +`index_verification_concurrency`:: +(Optional, integer) Specifies the number of indices to verify concurrently. +Defaults to `0` which means to use the entire `snapshot_meta` thread pool. + +`meta_thread_pool_concurrency`:: +(Optional, integer) Specifies the maximum number of snapshot metadata +operations to execute concurrently.
Defaults to `0` which means to use at most +half of the `snapshot_meta` thread pool at once. + +`index_snapshot_verification_concurrency`:: +(Optional, integer) Specifies the maximum number of index snapshots to verify +concurrently within each index verification. Defaults to `1`. + +`max_failed_shard_snapshots`:: +(Optional, integer) Limits the number of shard snapshot failures to track +during integrity verification, in order to avoid excessive resource usage. If +your repository contains more than this number of shard snapshot failures then +the verification will fail. Defaults to `10000`. + +`verify_blob_contents`:: +(Optional, boolean) Specifies whether to verify the checksum of every data blob +in the repository. Defaults to `false`. If this feature is enabled, {es} will +read the entire repository contents, which may be extremely slow and expensive. + +`blob_thread_pool_concurrency`:: +(Optional, integer) If `?verify_blob_contents` is `true`, this parameter +specifies how many blobs to verify at once. Defaults to `1`. + +`max_bytes_per_sec`:: +(Optional, <>) +If `?verify_blob_contents` is `true`, this parameter specifies the maximum +amount of data that {es} will read from the repository every second. Defaults +to `10mb`. + +[role="child_attributes"] +[[verify-repo-integrity-api-response-body]] +==== {api-response-body-title} + +The response exposes implementation details of the analysis which may change +from version to version. The response body format is therefore not considered +stable and may be different in newer versions. + +`log`:: +(array) A sequence of objects that report the progress of the analysis. ++ +.Properties of `log` +[%collapsible%open] +==== +`timestamp_in_millis`:: +(integer) The timestamp of this log entry, represented as the number of +milliseconds since the {wikipedia}/Unix_time[Unix epoch]. + +`timestamp`:: +(string) The timestamp of this log entry, represented as a string formatted +according to {wikipedia}/ISO_8601[ISO 8601]. Only included if the +<> flag is set. + +`snapshot`:: +(object) If the log entry pertains to a particular snapshot then the snapshot +will be described in this object. + +`index`:: +(object) If the log entry pertains to a particular index then the index will be +described in this object. + +`snapshot_restorability`:: +(object) If the log entry pertains to the restorability of an index then the +details will be described in this object. + +`anomaly`:: +(string) If the log entry pertains to an anomaly in the repository contents then +this string will describe the anomaly. + +`exception`:: +(object) If the log entry pertains to an exception that {es} encountered during +the verification then the details will be included in this object. + +==== + +`results`:: +(object) An object which describes the final results of the analysis. ++ +.Properties of `results` +[%collapsible%open] +==== +`status`:: +(object) The final status of the analysis task. + +`final_repository_generation`:: +(integer) The repository generation at the end of the analysis. If there were +any writes to the repository during the analysis then this value will be +different from the `generation` reported in the task status, and the analysis +may have detected spurious anomalies due to the concurrent writes, or may even +have failed to detect some anomalies in the repository contents. + +`total_anomalies`:: +(integer) The total number of anomalies detected during the analysis. + +`result`:: +(string) The final result of the analysis. 
If the repository contents appear to +be intact then this will be the string `pass`. If this field is missing, or +contains some other value, then the repository contents were not fully +verified. + +==== + +`exception`:: +(object) If the analysis encountered an exception which prevented it from +completing successfully then this exception will be reported here. diff --git a/docs/reference/snapshot-restore/register-repository.asciidoc b/docs/reference/snapshot-restore/register-repository.asciidoc index 28b0640a8fae5..2147ad3c684f3 100644 --- a/docs/reference/snapshot-restore/register-repository.asciidoc +++ b/docs/reference/snapshot-restore/register-repository.asciidoc @@ -272,7 +272,9 @@ filesystem snapshot of this repository. When restoring a repository from a backup, you must not register the repository with {es} until the repository contents are fully restored. If you alter the contents of a repository while it is registered with {es} then the repository -may become unreadable or may silently lose some of its contents. +may become unreadable or may silently lose some of its contents. After +restoring a repository from a backup, use the <> API +to verify its integrity before you start to use the repository. include::repository-azure.asciidoc[] include::repository-gcs.asciidoc[] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.repository_verify_integrity.json b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.repository_verify_integrity.json new file mode 100644 index 0000000000000..bab8101b74552 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/snapshot.repository_verify_integrity.json @@ -0,0 +1,65 @@ +{ + "snapshot.repository_verify_integrity":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-snapshots.html", + "description":"Verifies the integrity of the contents of a snapshot repository" + }, + "stability":"experimental", + "visibility":"private", + "headers": { + "accept": [ + "application/json" + ] + }, + "url":{ + "paths":[ + { + "path":"/_snapshot/{repository}/_verify_integrity", + "methods":[ + "POST" + ], + "parts":{ + "repository":{ + "type":"string", + "description":"A repository name" + } + } + } + ] + }, + "params":{ + "meta_thread_pool_concurrency":{ + "type":"number", + "description":"Number of threads to use for reading metadata" + }, + "blob_thread_pool_concurrency":{ + "type":"number", + "description":"Number of threads to use for reading blob contents" + }, + "snapshot_verification_concurrency":{ + "type":"number", + "description":"Number of snapshots to verify concurrently" + }, + "index_verification_concurrency":{ + "type":"number", + "description":"Number of indices to verify concurrently" + }, + "index_snapshot_verification_concurrency":{ + "type":"number", + "description":"Number of snapshots to verify concurrently within each index" + }, + "max_failed_shard_snapshots":{ + "type":"number", + "description":"Maximum permitted number of failed shard snapshots" + }, + "verify_blob_contents":{ + "type":"boolean", + "description":"Whether to verify the contents of individual blobs" + }, + "max_bytes_per_sec":{ + "type":"string", + "description":"Rate limit for individual blob verification" + } + } + } +} diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java index c6494eca9823b..72376d5b20fdb 100644 --- 
a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java @@ -281,6 +281,13 @@ public Collection getSnapshotIds() { return snapshotIds.values(); } + /** + * @return the number of index snapshots (i.e. the sum of the index count of each snapshot) + */ + public long getIndexSnapshotCount() { + return indexSnapshots.values().stream().mapToLong(List::size).sum(); + } + /** * @return whether some of the {@link SnapshotDetails} of the given snapshot are missing, due to BwC, so that they must be loaded from * the {@link SnapshotInfo} blob instead. diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle index c9e860f27a5d4..19f2e984f6493 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/build.gradle @@ -17,6 +17,7 @@ dependencies { javaRestTestImplementation project(path: xpackModule('rank-rrf')) javaRestTestImplementation project(path: xpackModule('esql-core')) javaRestTestImplementation project(path: xpackModule('esql')) + javaRestTestImplementation project(path: xpackModule('snapshot-repo-test-kit')) } // location for keys and certificates diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index c5304d8313df2..853d0fd9318ae 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -98,6 +98,7 @@ public class Constants { "cluster:admin/snapshot/restore", "cluster:admin/snapshot/status", "cluster:admin/snapshot/status[nodes]", + "cluster:admin/repository/verify_integrity", "cluster:admin/features/get", "cluster:admin/features/reset", "cluster:admin/tasks/cancel", diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/20_verify_integrity.yml b/x-pack/plugin/snapshot-repo-test-kit/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/20_verify_integrity.yml new file mode 100644 index 0000000000000..be6929a15ff44 --- /dev/null +++ b/x-pack/plugin/snapshot-repo-test-kit/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/20_verify_integrity.yml @@ -0,0 +1,39 @@ +--- +setup: + - requires: + cluster_features: "snapshot.repository_verify_integrity" + reason: "required feature" + + - do: + snapshot.create_repository: + repository: test_repo + body: + type: fs + settings: + location: "test_repo_loc" + + - do: + bulk: + index: test + refresh: true + body: + - '{"index":{}}' + - '{}' + + - do: + snapshot.create: + repository: test_repo + snapshot: snap + wait_for_completion: true + +--- +"Integrity verification": + - do: + snapshot.repository_verify_integrity: + repository: test_repo + + - match: {results.result: pass} + - match: {results.status.snapshots.total: 1} + - match: {results.status.snapshots.verified: 1} + - match: {results.status.indices.total: 1} + - match: {results.status.indices.verified: 1} diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityIT.java 
b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityIT.java new file mode 100644 index 0000000000000..4b0e0fdbb0955 --- /dev/null +++ b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityIT.java @@ -0,0 +1,806 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.repositories.blobstore.testkit.integrity; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.action.support.ActionTestUtils; +import org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.IOUtils; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot; +import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshots; +import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshotsIntegritySuppressor; +import org.elasticsearch.index.snapshots.blobstore.SnapshotFiles; +import org.elasticsearch.index.store.StoreFileMetadata; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.repositories.RepositoryData; +import org.elasticsearch.repositories.ShardGenerations; +import org.elasticsearch.repositories.blobstore.BlobStoreCorruptionUtils; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.repositories.blobstore.RepositoryFileType; +import org.elasticsearch.repositories.blobstore.testkit.SnapshotRepositoryTestKit; +import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; +import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.SnapshotInfo; +import org.elasticsearch.snapshots.SnapshotState; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.ObjectPath; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.test.transport.StubbableTransport; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.xcontent.XContentFactory; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.OptionalLong; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; 
+import java.util.function.Function; +import java.util.function.UnaryOperator; +import java.util.stream.Collectors; +import java.util.stream.LongStream; +import java.util.stream.StreamSupport; + +import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_SETTING; +import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.INDEX_SHARD_SNAPSHOTS_FORMAT; +import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.SNAPSHOT_FORMAT; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; + +public class RepositoryVerifyIntegrityIT extends AbstractSnapshotIntegTestCase { + + @Override + protected boolean addMockHttpTransport() { + return false; // enable HTTP + } + + @SuppressWarnings("unchecked") + @Override + protected Collection> nodePlugins() { + return CollectionUtils.appendToCopyNoNullElements( + super.nodePlugins(), + SnapshotRepositoryTestKit.class, + MockTransportService.TestPlugin.class + ); + } + + private static long getCurrentTime(Function summarizer) { + return summarizer.apply( + StreamSupport.stream(internalCluster().getInstances(ThreadPool.class).spliterator(), false) + .mapToLong(ThreadPool::absoluteTimeInMillis) + ).orElseThrow(AssertionError::new); + } + + public void testSuccess() throws IOException { + final var minStartTimeMillis = getCurrentTime(LongStream::min); + final var testContext = createTestContext(); + final var request = testContext.getVerifyIntegrityRequest(); + if (randomBoolean()) { + request.addParameter("verify_blob_contents", null); + } + final var response = getRestClient().performRequest(request); + final var maxEndTimeMillis = getCurrentTime(LongStream::max); + assertEquals(200, response.getStatusLine().getStatusCode()); + final var responseObjectPath = ObjectPath.createFromResponse(response); + final var logEntryCount = responseObjectPath.evaluateArraySize("log"); + final var seenSnapshotNames = new HashSet(); + final var seenIndexNames = new HashSet(); + for (int i = 0; i < logEntryCount; i++) { + assertThat( + responseObjectPath.evaluate("log." + i + ".timestamp_in_millis"), + allOf(greaterThanOrEqualTo(minStartTimeMillis), lessThanOrEqualTo(maxEndTimeMillis)) + ); + assertThat( + responseObjectPath.evaluate("log." + i + ".timestamp"), + request.getParameters().containsKey("human") ? instanceOf(String.class) : nullValue() + ); + final String maybeSnapshotName = responseObjectPath.evaluate("log." + i + ".snapshot.snapshot"); + if (maybeSnapshotName != null) { + assertTrue(seenSnapshotNames.add(maybeSnapshotName)); + } else { + final String indexName = responseObjectPath.evaluate("log." + i + ".index.name"); + assertNotNull(indexName); + assertTrue(seenIndexNames.add(indexName)); + assertEquals( + testContext.snapshotNames().size(), + (int) responseObjectPath.evaluate("log." + i + ".snapshot_restorability.total_snapshot_count") + ); + assertEquals( + testContext.snapshotNames().size(), + (int) responseObjectPath.evaluate("log." 
+ i + ".snapshot_restorability.restorable_snapshot_count") + ); + } + } + assertEquals(Set.copyOf(testContext.snapshotNames()), seenSnapshotNames); + assertEquals(Set.copyOf(testContext.indexNames()), seenIndexNames); + + assertEquals(0, (int) responseObjectPath.evaluate("results.total_anomalies")); + assertEquals("pass", responseObjectPath.evaluate("results.result")); + } + + public void testTaskStatus() throws IOException { + final var testContext = createTestContext(); + + // use non-master node to coordinate the request so that we can capture chunks being sent back + final var coordNodeName = getCoordinatingNodeName(); + final var coordNodeTransportService = MockTransportService.getInstance(coordNodeName); + final var masterTaskManager = MockTransportService.getInstance(internalCluster().getMasterName()).getTaskManager(); + + final SubscribableListener snapshotsCompleteStatusListener = new SubscribableListener<>(); + final AtomicInteger chunksSeenCounter = new AtomicInteger(); + + coordNodeTransportService.addRequestHandlingBehavior( + TransportRepositoryVerifyIntegrityResponseChunkAction.ACTION_NAME, + (handler, request, channel, task) -> { + final SubscribableListener unblockChunkHandlingListener = switch (request.chunkContents().type()) { + case START_RESPONSE -> { + final var status = asInstanceOf( + RepositoryVerifyIntegrityTask.Status.class, + randomBoolean() + ? masterTaskManager.getTask(task.getParentTaskId().getId()).getStatus() + : client().admin() + .cluster() + .prepareGetTask(task.getParentTaskId()) + .get(SAFE_AWAIT_TIMEOUT) + .getTask() + .getTask() + .status() + ); + assertEquals(testContext.repositoryName(), status.repositoryName()); + assertEquals(testContext.snapshotNames().size(), status.snapshotCount()); + assertEquals(0L, status.snapshotsVerified()); + assertEquals(testContext.indexNames().size(), status.indexCount()); + assertEquals(0L, status.indicesVerified()); + assertEquals(testContext.indexNames().size() * testContext.snapshotNames().size(), status.indexSnapshotCount()); + assertEquals(0L, status.indexSnapshotsVerified()); + assertEquals(0L, status.blobsVerified()); + assertEquals(0L, status.blobBytesVerified()); + yield SubscribableListener.newSucceeded(null); + } + case INDEX_RESTORABILITY -> { + // several of these chunks might arrive concurrently; we want to verify the task status before processing any of + // them, so use SubscribableListener to pick out the first status + snapshotsCompleteStatusListener.onResponse( + asInstanceOf( + RepositoryVerifyIntegrityTask.Status.class, + masterTaskManager.getTask(task.getParentTaskId().getId()).getStatus() + ) + ); + yield snapshotsCompleteStatusListener.andThenAccept(status -> { + assertEquals(testContext.repositoryName(), status.repositoryName()); + assertEquals(testContext.snapshotNames().size(), status.snapshotCount()); + assertEquals(testContext.snapshotNames().size(), status.snapshotsVerified()); + assertEquals(testContext.indexNames().size(), status.indexCount()); + assertEquals(0L, status.indicesVerified()); + }); + } + case SNAPSHOT_INFO -> SubscribableListener.newSucceeded(null); + case ANOMALY -> fail(null, "should not see anomalies"); + }; + + unblockChunkHandlingListener.addListener(ActionTestUtils.assertNoFailureListener(ignored -> { + chunksSeenCounter.incrementAndGet(); + handler.messageReceived(request, channel, task); + })); + } + ); + + try (var client = createRestClient(coordNodeName)) { + final var response = client.performRequest(testContext.getVerifyIntegrityRequest()); + assertEquals(1 + 
testContext.indexNames().size() + testContext.snapshotNames().size(), chunksSeenCounter.get()); + assertEquals(200, response.getStatusLine().getStatusCode()); + final var responseObjectPath = ObjectPath.createFromResponse(response); + assertEquals(0, (int) responseObjectPath.evaluate("results.total_anomalies")); + assertEquals("pass", responseObjectPath.evaluate("results.result")); + } finally { + coordNodeTransportService.clearAllRules(); + } + } + + public void testShardSnapshotFailed() throws IOException { + final var testContext = createTestContext(); + + final var newIndex = randomIdentifier(); + assertAcked( + client().admin() + .indices() + .prepareCreate(newIndex) + .setWaitForActiveShards(ActiveShardCount.NONE) + .setSettings(indexSettings(1, 0).put(INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "_id", "not-a-node-id")) + ); + + final var createSnapshotResponse = client().admin() + .cluster() + .prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, testContext.repositoryName(), randomIdentifier()) + .setWaitForCompletion(true) + .setPartial(true) + .get(); + + assertEquals(SnapshotState.PARTIAL, createSnapshotResponse.getSnapshotInfo().state()); + + final var takeGoodSnapshot = randomBoolean(); + if (takeGoodSnapshot) { + updateIndexSettings(Settings.builder().putNull(INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "_id"), newIndex); + ensureGreen(newIndex); + createSnapshot(testContext.repositoryName(), randomIdentifier(), List.of(newIndex)); + } + + final Response response; + try (var ignored = new BlobStoreIndexShardSnapshotsIntegritySuppressor()) { + response = getRestClient().performRequest(testContext.getVerifyIntegrityRequest()); + } finally { + assertAcked( + client().admin().cluster().prepareDeleteRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, testContext.repositoryName()) + ); + } + assertEquals(200, response.getStatusLine().getStatusCode()); + final var responseObjectPath = ObjectPath.createFromResponse(response); + assertThat(getAnomalies(responseObjectPath), equalTo(Set.of())); + assertEquals(0, (int) responseObjectPath.evaluate("results.total_anomalies")); + assertEquals("pass", responseObjectPath.evaluate("results.result")); + + final var logEntryCount = responseObjectPath.evaluateArraySize("log"); + for (int i = 0; i < logEntryCount; i++) { + if (newIndex.equals(responseObjectPath.evaluate("log." + i + ".index.name"))) { + assertEquals( + takeGoodSnapshot ? 2 : 1, + (int) responseObjectPath.evaluate("log." + i + ".snapshot_restorability.total_snapshot_count") + ); + assertEquals( + takeGoodSnapshot ? 1 : 0, + (int) responseObjectPath.evaluate("log." 
+ i + ".snapshot_restorability.restorable_snapshot_count") + ); + } + } + } + + public void testCorruption() throws IOException { + final var testContext = createTestContext(); + + final Response response; + final Path corruptedFile; + final RepositoryFileType corruptedFileType; + try (var ignored = new BlobStoreIndexShardSnapshotsIntegritySuppressor()) { + corruptedFile = BlobStoreCorruptionUtils.corruptRandomFile(testContext.repositoryRootPath()); + corruptedFileType = RepositoryFileType.getRepositoryFileType(testContext.repositoryRootPath(), corruptedFile); + logger.info("--> corrupted file: {}", corruptedFile); + logger.info("--> corrupted file type: {}", corruptedFileType); + + final var request = testContext.getVerifyIntegrityRequest(); + if (corruptedFileType == RepositoryFileType.SHARD_DATA || randomBoolean()) { + request.addParameter("verify_blob_contents", null); + } + response = getRestClient().performRequest(request); + } finally { + assertAcked( + client().admin().cluster().prepareDeleteRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, testContext.repositoryName()) + ); + } + assertEquals(200, response.getStatusLine().getStatusCode()); + final var responseObjectPath = ObjectPath.createFromResponse(response); + final var logEntryCount = responseObjectPath.evaluateArraySize("log"); + final var anomalies = new HashSet(); + final var seenIndexNames = new HashSet(); + int fullyRestorableIndices = 0; + for (int i = 0; i < logEntryCount; i++) { + final String maybeAnomaly = responseObjectPath.evaluate("log." + i + ".anomaly"); + if (maybeAnomaly != null) { + anomalies.add(maybeAnomaly); + } else { + final String indexName = responseObjectPath.evaluate("log." + i + ".index.name"); + if (indexName != null) { + assertTrue(seenIndexNames.add(indexName)); + assertThat(testContext.indexNames(), hasItem(indexName)); + final int totalSnapshots = responseObjectPath.evaluate("log." + i + ".snapshot_restorability.total_snapshot_count"); + final int restorableSnapshots = responseObjectPath.evaluate( + "log." + i + ".snapshot_restorability.restorable_snapshot_count" + ); + if (totalSnapshots == restorableSnapshots) { + fullyRestorableIndices += 1; + } + } + } + } + + assertThat( + fullyRestorableIndices, + corruptedFileType == RepositoryFileType.SHARD_GENERATION || corruptedFileType.equals(RepositoryFileType.GLOBAL_METADATA) + ? 
equalTo(testContext.indexNames().size()) + : lessThan(testContext.indexNames().size()) + ); + assertThat(anomalies, not(empty())); + assertThat(responseObjectPath.evaluate("results.total_anomalies"), greaterThanOrEqualTo(anomalies.size())); + assertEquals("fail", responseObjectPath.evaluate("results.result")); + + // remove permitted/expected anomalies to verify that no unexpected ones were seen + switch (corruptedFileType) { + case SNAPSHOT_INFO -> anomalies.remove("failed to load snapshot info"); + case GLOBAL_METADATA -> anomalies.remove("failed to load global metadata"); + case INDEX_METADATA -> anomalies.remove("failed to load index metadata"); + case SHARD_GENERATION -> anomalies.remove("failed to load shard generation"); + case SHARD_SNAPSHOT_INFO -> anomalies.remove("failed to load shard snapshot"); + case SHARD_DATA -> { + anomalies.remove("missing blob"); + anomalies.remove("mismatched blob length"); + anomalies.remove("corrupt data blob"); + } + } + assertThat(anomalies, empty()); + } + + public void testTransportException() throws IOException { + final var testContext = createTestContext(); + + // use non-master node to coordinate the request so that we can capture chunks being sent back + final var coordNodeName = getCoordinatingNodeName(); + final var coordNodeTransportService = MockTransportService.getInstance(coordNodeName); + final var masterTransportService = MockTransportService.getInstance(internalCluster().getMasterName()); + + final var messageCount = 2 // request & response + * (1 // forward to master + + 1 // start response + + testContext.indexNames().size() + testContext.snapshotNames().size()); + final var failureStep = between(1, messageCount); + + final var failTransportMessageBehaviour = new StubbableTransport.RequestHandlingBehavior<>() { + final AtomicInteger currentStep = new AtomicInteger(); + + @Override + public void messageReceived( + TransportRequestHandler handler, + TransportRequest request, + TransportChannel channel, + Task task + ) throws Exception { + if (currentStep.incrementAndGet() == failureStep) { + throw new ElasticsearchException("simulated"); + } else { + handler.messageReceived(request, new TransportChannel() { + @Override + public String getProfileName() { + return "test"; + } + + @Override + public void sendResponse(TransportResponse response) { + if (currentStep.incrementAndGet() == failureStep) { + channel.sendResponse(new ElasticsearchException("simulated")); + } else { + channel.sendResponse(response); + } + } + + @Override + public void sendResponse(Exception exception) { + if (currentStep.incrementAndGet() == failureStep) { + throw new AssertionError("shouldn't have failed yet"); + } else { + channel.sendResponse(exception); + } + } + }, task); + } + } + }; + + masterTransportService.addRequestHandlingBehavior( + TransportRepositoryVerifyIntegrityAction.ACTION_NAME, + failTransportMessageBehaviour + ); + + coordNodeTransportService.addRequestHandlingBehavior( + TransportRepositoryVerifyIntegrityResponseChunkAction.ACTION_NAME, + failTransportMessageBehaviour + ); + + final var request = testContext.getVerifyIntegrityRequest(); + if (failureStep <= 2) { + request.addParameter("ignore", "500"); + } + final Response response; + try (var restClient = createRestClient(coordNodeName)) { + response = restClient.performRequest(request); + } + final var responseObjectPath = ObjectPath.createFromResponse(response); + if (failureStep <= 2) { + assertEquals(500, response.getStatusLine().getStatusCode()); + 
assertNotNull(responseObjectPath.evaluate("error")); + assertEquals(500, (int) responseObjectPath.evaluate("status")); + } else { + assertEquals(200, response.getStatusLine().getStatusCode()); + assertNotNull(responseObjectPath.evaluate("log")); + assertNotNull(responseObjectPath.evaluate("exception")); + } + + assertNull(responseObjectPath.evaluate("results")); + } + + public void testBadSnapshotInfo() throws IOException { + final var testContext = createTestContext(); + + final var snapshotInfoBlob = BlobStoreCorruptionUtils.getRandomFileToCorrupt( + testContext.repositoryRootPath(), + RepositoryFileType.SNAPSHOT_INFO + ); + + final SnapshotInfo snapshotInfo; + try (var inputStream = Files.newInputStream(snapshotInfoBlob)) { + snapshotInfo = SNAPSHOT_FORMAT.deserialize(testContext.repositoryName(), xContentRegistry(), inputStream); + } + + final var newIndices = new ArrayList<>(snapshotInfo.indices()); + newIndices.remove(between(0, newIndices.size() - 1)); + + final Response response; + try (var ignored = new BlobStoreIndexShardSnapshotsIntegritySuppressor()) { + try (var outputStream = Files.newOutputStream(snapshotInfoBlob)) { + SNAPSHOT_FORMAT.serialize( + new SnapshotInfo( + snapshotInfo.snapshot(), + newIndices, + snapshotInfo.dataStreams(), + snapshotInfo.featureStates(), + snapshotInfo.reason(), + snapshotInfo.version(), + snapshotInfo.startTime(), + snapshotInfo.endTime(), + snapshotInfo.totalShards(), + snapshotInfo.successfulShards(), + snapshotInfo.shardFailures(), + snapshotInfo.includeGlobalState(), + snapshotInfo.userMetadata(), + snapshotInfo.state(), + snapshotInfo.indexSnapshotDetails() + ), + snapshotInfoBlob.toString(), + randomBoolean(), + outputStream + ); + } + + response = getRestClient().performRequest(testContext.getVerifyIntegrityRequest()); + } finally { + assertAcked( + client().admin().cluster().prepareDeleteRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, testContext.repositoryName()) + ); + } + assertEquals(200, response.getStatusLine().getStatusCode()); + assertThat(getAnomalies(ObjectPath.createFromResponse(response)), equalTo(Set.of("snapshot contents mismatch"))); + } + + public void testShardPathEmpty() throws IOException { + final var testContext = createTestContext(); + + final var shardPath = BlobStoreCorruptionUtils.getRandomFileToCorrupt( + testContext.repositoryRootPath(), + RepositoryFileType.SHARD_GENERATION + ).getParent(); + + final Response response; + try (var ignored = new BlobStoreIndexShardSnapshotsIntegritySuppressor()) { + IOUtils.rm(shardPath); + response = getRestClient().performRequest(testContext.getVerifyIntegrityRequest()); + } finally { + assertAcked( + client().admin().cluster().prepareDeleteRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, testContext.repositoryName()) + ); + } + assertEquals(200, response.getStatusLine().getStatusCode()); + assertThat(getAnomalies(ObjectPath.createFromResponse(response)), equalTo(Set.of("failed to load shard snapshot"))); + } + + public void testShardPathUnreadable() throws IOException { + final var testContext = createTestContext(); + + final var shardPath = BlobStoreCorruptionUtils.getRandomFileToCorrupt( + testContext.repositoryRootPath(), + RepositoryFileType.SHARD_GENERATION + ).getParent(); + + final Response response; + try (var ignored = new BlobStoreIndexShardSnapshotsIntegritySuppressor()) { + IOUtils.rm(shardPath); + Files.write(shardPath, new byte[0], StandardOpenOption.CREATE_NEW); + response = getRestClient().performRequest(testContext.getVerifyIntegrityRequest()); + 
} finally { + assertAcked( + client().admin().cluster().prepareDeleteRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, testContext.repositoryName()) + ); + } + assertEquals(200, response.getStatusLine().getStatusCode()); + assertThat(getAnomalies(ObjectPath.createFromResponse(response)), equalTo(Set.of("failed to list shard container contents"))); + } + + public void testShardGenerationMissing() throws IOException { + final var testContext = createTestContext(); + + final var repository = asInstanceOf( + BlobStoreRepository.class, + internalCluster().getCurrentMasterNodeInstance(RepositoriesService.class).repository(testContext.repositoryName()) + ); + final var repoSettings = repository.getMetadata().settings(); + + final RepositoryData repositoryData = safeAwait(l -> repository.getRepositoryData(EsExecutors.DIRECT_EXECUTOR_SERVICE, l)); + + assertAcked( + client().admin().cluster().prepareDeleteRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, testContext.repositoryName()) + ); + + final var rootBlob = BlobStoreCorruptionUtils.getRandomFileToCorrupt( + testContext.repositoryRootPath(), + RepositoryFileType.ROOT_INDEX_N + ); + + final var indexToBreak = randomFrom(testContext.indexNames()); + final var newShardGenerations = ShardGenerations.builder(); + for (final var index : repositoryData.shardGenerations().indices()) { + final var indexShardGenerations = repositoryData.shardGenerations().getGens(index); + for (int i = 0; i < indexShardGenerations.size(); i++) { + if (i > 0 || index.getName().equals(indexToBreak) == false) { + newShardGenerations.put(index, i, indexShardGenerations.get(i)); + } + } + } + + final var brokenRepositoryData = new RepositoryData( + repositoryData.getUuid(), + repositoryData.getGenId(), + repositoryData.getSnapshotIds().stream().collect(Collectors.toMap(SnapshotId::getUUID, Function.identity())), + repositoryData.getSnapshotIds().stream().collect(Collectors.toMap(SnapshotId::getUUID, repositoryData::getSnapshotDetails)), + repositoryData.getIndices().values().stream().collect(Collectors.toMap(Function.identity(), repositoryData::getSnapshots)), + newShardGenerations.build(), + repositoryData.indexMetaDataGenerations(), + repositoryData.getClusterUUID() + ); + + final Response response; + try (var ignored = new BlobStoreIndexShardSnapshotsIntegritySuppressor()) { + + Files.write( + rootBlob, + BytesReference.toBytes( + BytesReference.bytes(brokenRepositoryData.snapshotsToXContent(XContentFactory.jsonBuilder(), IndexVersion.current())) + ), + StandardOpenOption.TRUNCATE_EXISTING + ); + + assertAcked( + client().admin() + .cluster() + .preparePutRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, testContext.repositoryName()) + .setType(FsRepository.TYPE) + .setSettings(repoSettings) + ); + + response = getRestClient().performRequest(testContext.getVerifyIntegrityRequest()); + } finally { + assertAcked( + client().admin().cluster().prepareDeleteRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, testContext.repositoryName()) + ); + } + assertEquals(200, response.getStatusLine().getStatusCode()); + assertThat(getAnomalies(ObjectPath.createFromResponse(response)), equalTo(Set.of("shard generation not defined"))); + } + + public void testSnapshotNotInShardGeneration() throws IOException { + final var testContext = createTestContext(); + runInconsistentShardGenerationBlobTest( + testContext, + blobStoreIndexShardSnapshots -> blobStoreIndexShardSnapshots.withRetainedSnapshots( + testContext.snapshotNames().stream().skip(1).map(n -> new SnapshotId(n, 
"_na_")).collect(Collectors.toSet()) + ), + "snapshot not in shard generation" + ); + } + + public void testBlobInShardGenerationButNotSnapshot() throws IOException { + final var testContext = createTestContext(); + final var snapshotToUpdate = randomFrom(testContext.snapshotNames()); + runInconsistentShardGenerationBlobTest(testContext, blobStoreIndexShardSnapshots -> { + BlobStoreIndexShardSnapshots result = BlobStoreIndexShardSnapshots.EMPTY; + for (final var snapshotFiles : blobStoreIndexShardSnapshots.snapshots()) { + if (snapshotFiles.snapshot().equals(snapshotToUpdate)) { + result = result.withAddedSnapshot( + new SnapshotFiles( + snapshotToUpdate, + CollectionUtils.appendToCopy( + snapshotFiles.indexFiles(), + new BlobStoreIndexShardSnapshot.FileInfo( + "extra", + new StoreFileMetadata("extra", 1L, "checksum", Version.CURRENT.toString()), + ByteSizeValue.ONE + ) + ), + snapshotFiles.shardStateIdentifier() + ) + ); + } else { + result = result.withAddedSnapshot(snapshotFiles); + } + } + return result; + }, "blob in shard generation but not snapshot"); + } + + public void testSnapshotShardGenerationMismatch() throws IOException { + final var testContext = createTestContext(); + runInconsistentShardGenerationBlobTest(testContext, blobStoreIndexShardSnapshots -> { + final var fileToUpdate = randomFrom(blobStoreIndexShardSnapshots.iterator().next().indexFiles()); + final var updatedFile = new BlobStoreIndexShardSnapshot.FileInfo( + fileToUpdate.name(), + fileToUpdate.metadata(), + ByteSizeValue.ONE + ); + assertFalse(fileToUpdate.isSame(updatedFile)); + + BlobStoreIndexShardSnapshots result = BlobStoreIndexShardSnapshots.EMPTY; + for (final var snapshotFiles : blobStoreIndexShardSnapshots.snapshots()) { + result = result.withAddedSnapshot( + new SnapshotFiles( + snapshotFiles.snapshot(), + snapshotFiles.indexFiles() + .stream() + .map(fileInfo -> fileInfo.name().equals(fileToUpdate.name()) ? 
updatedFile : fileInfo) + .toList(), + snapshotFiles.shardStateIdentifier() + ) + ); + } + return result; + }, "snapshot shard generation mismatch"); + } + + public void testBlobInSnapshotNotShardGeneration() throws IOException { + final var testContext = createTestContext(); + final var snapshotToUpdate = randomFrom(testContext.snapshotNames()); + runInconsistentShardGenerationBlobTest(testContext, blobStoreIndexShardSnapshots -> { + BlobStoreIndexShardSnapshots result = BlobStoreIndexShardSnapshots.EMPTY; + for (final var snapshotFiles : blobStoreIndexShardSnapshots.snapshots()) { + if (snapshotFiles.snapshot().equals(snapshotToUpdate)) { + final var indexFilesCopy = new ArrayList<>(snapshotFiles.indexFiles()); + indexFilesCopy.remove(between(0, indexFilesCopy.size() - 1)); + result = result.withAddedSnapshot( + new SnapshotFiles(snapshotToUpdate, indexFilesCopy, snapshotFiles.shardStateIdentifier()) + ); + } else { + result = result.withAddedSnapshot(snapshotFiles); + } + } + return result; + }, "blob in snapshot but not shard generation"); + } + + private void runInconsistentShardGenerationBlobTest( + TestContext testContext, + UnaryOperator shardGenerationUpdater, + String expectedAnomaly + ) throws IOException { + + final var shardGenerationBlob = BlobStoreCorruptionUtils.getRandomFileToCorrupt( + testContext.repositoryRootPath(), + RepositoryFileType.SHARD_GENERATION + ); + + final BlobStoreIndexShardSnapshots blobStoreIndexShardSnapshots; + try (var inputStream = Files.newInputStream(shardGenerationBlob)) { + blobStoreIndexShardSnapshots = INDEX_SHARD_SNAPSHOTS_FORMAT.deserialize( + testContext.repositoryName(), + xContentRegistry(), + inputStream + ); + } + + final Response response; + try (var ignored = new BlobStoreIndexShardSnapshotsIntegritySuppressor()) { + try (var outputStream = Files.newOutputStream(shardGenerationBlob)) { + INDEX_SHARD_SNAPSHOTS_FORMAT.serialize( + shardGenerationUpdater.apply(blobStoreIndexShardSnapshots), + shardGenerationBlob.toString(), + randomBoolean(), + outputStream + ); + } + response = getRestClient().performRequest(testContext.getVerifyIntegrityRequest()); + } finally { + assertAcked( + client().admin().cluster().prepareDeleteRepository(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, testContext.repositoryName()) + ); + } + assertEquals(200, response.getStatusLine().getStatusCode()); + assertThat(getAnomalies(ObjectPath.createFromResponse(response)), equalTo(Set.of(expectedAnomaly))); + } + + private Set getAnomalies(ObjectPath responseObjectPath) throws IOException { + final var logEntryCount = responseObjectPath.evaluateArraySize("log"); + final var anomalies = new HashSet(); + for (int i = 0; i < logEntryCount; i++) { + final String maybeAnomaly = responseObjectPath.evaluate("log." 
+ i + ".anomaly"); + if (maybeAnomaly != null) { + anomalies.add(maybeAnomaly); + } + } + + assertThat(responseObjectPath.evaluate("results.total_anomalies"), greaterThanOrEqualTo(anomalies.size())); + if (anomalies.size() > 0) { + assertEquals("fail", responseObjectPath.evaluate("results.result")); + } + + return anomalies; + } + + private record TestContext(String repositoryName, Path repositoryRootPath, List indexNames, List snapshotNames) { + Request getVerifyIntegrityRequest() { + final var request = new Request("POST", "/_snapshot/" + repositoryName + "/_verify_integrity"); + if (randomBoolean()) { + request.addParameter("human", null); + } + if (randomBoolean()) { + request.addParameter("pretty", null); + } + return request; + } + } + + private TestContext createTestContext() { + final var repositoryName = randomIdentifier(); + final var repositoryRootPath = randomRepoPath(); + + createRepository(repositoryName, FsRepository.TYPE, repositoryRootPath); + + final var indexNames = randomList(1, 3, ESTestCase::randomIdentifier); + for (var indexName : indexNames) { + createIndexWithRandomDocs(indexName, between(1, 100)); + flushAndRefresh(indexName); + } + + final var snapshotNames = randomList(1, 3, ESTestCase::randomIdentifier); + for (var snapshotName : snapshotNames) { + createSnapshot(repositoryName, snapshotName, indexNames); + } + + return new TestContext(repositoryName, repositoryRootPath, indexNames, snapshotNames); + } + + private static String getCoordinatingNodeName() { + if (internalCluster().size() == 1) { + internalCluster().startNode(); + } + return randomValueOtherThan(internalCluster().getMasterName(), () -> internalCluster().getRandomNodeName()); + } +} diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/module-info.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/module-info.java new file mode 100644 index 0000000000000..70385cdc4cf04 --- /dev/null +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/module-info.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+module org.elasticsearch.repositories.blobstore.testkit {
+    requires org.elasticsearch.base;
+    requires org.elasticsearch.server;
+    requires org.elasticsearch.xcontent;
+    requires org.elasticsearch.xcore;
+
+    requires org.apache.logging.log4j;
+    requires org.apache.lucene.core;
+    requires org.elasticsearch.logging;
+
+    exports org.elasticsearch.repositories.blobstore.testkit.analyze;
+    exports org.elasticsearch.repositories.blobstore.testkit.integrity;
+
+    provides org.elasticsearch.features.FeatureSpecification
+        with
+            org.elasticsearch.repositories.blobstore.testkit.SnapshotRepositoryTestKitFeatures;
+}
diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKit.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKit.java
index 04d59906e6db3..b0ae1b0752b71 100644
--- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKit.java
+++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKit.java
@@ -22,8 +22,12 @@
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.repositories.blobstore.testkit.analyze.RepositoryAnalyzeAction;
 import org.elasticsearch.repositories.blobstore.testkit.analyze.RestRepositoryAnalyzeAction;
+import org.elasticsearch.repositories.blobstore.testkit.integrity.RepositoryVerifyIntegrityTask;
+import org.elasticsearch.repositories.blobstore.testkit.integrity.RestRepositoryVerifyIntegrityAction;
+import org.elasticsearch.repositories.blobstore.testkit.integrity.TransportRepositoryVerifyIntegrityCoordinationAction;
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestHandler;
+import org.elasticsearch.tasks.Task;
 import org.elasticsearch.xcontent.XContentBuilder;
 
 import java.io.IOException;
@@ -35,7 +39,13 @@ public class SnapshotRepositoryTestKit extends Plugin implements ActionPlugin {
 
     @Override
     public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
-        return List.of(new ActionHandler<>(RepositoryAnalyzeAction.INSTANCE, RepositoryAnalyzeAction.class));
+        return List.of(
+            new ActionHandler<>(RepositoryAnalyzeAction.INSTANCE, RepositoryAnalyzeAction.class),
+            new ActionHandler<>(
+                TransportRepositoryVerifyIntegrityCoordinationAction.INSTANCE,
+                TransportRepositoryVerifyIntegrityCoordinationAction.class
+            )
+        );
     }
 
     @Override
@@ -50,7 +60,7 @@ public List<RestHandler> getRestHandlers(
         Supplier<DiscoveryNodes> nodesInCluster,
         Predicate<NodeFeature> clusterSupportsFeature
     ) {
-        return List.of(new RestRepositoryAnalyzeAction());
+        return List.of(new RestRepositoryAnalyzeAction(), new RestRepositoryVerifyIntegrityAction());
     }
 
     public static void humanReadableNanos(XContentBuilder builder, String rawFieldName, String readableFieldName, long nanos)
@@ -63,4 +73,15 @@ public static void humanReadableNanos(XContentBuilder builder, String rawFieldNa
         builder.field(rawFieldName, nanos);
     }
 
+    @Override
+    public List<NamedWriteableRegistry.Entry> getNamedWriteables() {
+        return List.of(
+            new NamedWriteableRegistry.Entry(
+                Task.Status.class,
+                RepositoryVerifyIntegrityTask.Status.NAME,
+                RepositoryVerifyIntegrityTask.Status::new
+            )
+        );
+    }
 }
diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKitFeatures.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKitFeatures.java
new file mode 100644
index 0000000000000..cc513a948519b
--- /dev/null
+++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKitFeatures.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.repositories.blobstore.testkit;
+
+import org.elasticsearch.features.FeatureSpecification;
+import org.elasticsearch.features.NodeFeature;
+
+import java.util.Set;
+
+import static org.elasticsearch.repositories.blobstore.testkit.integrity.RestRepositoryVerifyIntegrityAction.REPOSITORY_VERIFY_INTEGRITY_FEATURE;
+
+public class SnapshotRepositoryTestKitFeatures implements FeatureSpecification {
+    @Override
+    public Set<NodeFeature> getFeatures() {
+        return Set.of(REPOSITORY_VERIFY_INTEGRITY_FEATURE);
+    }
+}
diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/ActiveRepositoryVerifyIntegrityTasks.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/ActiveRepositoryVerifyIntegrityTasks.java
new file mode 100644
index 0000000000000..ac410465c3deb
--- /dev/null
+++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/ActiveRepositoryVerifyIntegrityTasks.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.repositories.blobstore.testkit.integrity;
+
+import org.elasticsearch.ResourceNotFoundException;
+import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
+import org.elasticsearch.core.Releasable;
+import org.elasticsearch.core.Releasables;
+
+import java.util.Map;
+
+/**
+ * The repository-verify-integrity tasks that this node is currently coordinating.
+ */
+class ActiveRepositoryVerifyIntegrityTasks {
+
+    private final Map<Long, RepositoryVerifyIntegrityResponseStream> responseStreamsByCoordinatingTaskId = ConcurrentCollections
+        .newConcurrentMap();
+
+    Releasable registerResponseBuilder(long coordinatingTaskId, RepositoryVerifyIntegrityResponseStream responseStream) {
+        assert responseStream.hasReferences(); // ref held until the REST-layer listener is completed
+
+        final var previous = responseStreamsByCoordinatingTaskId.putIfAbsent(coordinatingTaskId, responseStream);
+        if (previous != null) {
+            final var exception = new IllegalStateException("already executing verify task [" + coordinatingTaskId + "]");
+            assert false : exception;
+            throw exception;
+        }
+
+        return Releasables.assertOnce(() -> {
+            final var removed = responseStreamsByCoordinatingTaskId.remove(coordinatingTaskId, responseStream);
+            if (removed == false) {
+                final var exception = new IllegalStateException("already completed verify task [" + coordinatingTaskId + "]");
+                assert false : exception;
+                throw exception;
+            }
+        });
+    }
+
+    /**
+     * Obtain the response stream for the given coordinating-node task ID, and increment its refcount.
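+     * <p>A minimal usage sketch of the acquire/release contract; the {@code writeChunk} call is hypothetical and stands
+     * in for whatever work the caller does with the acquired stream:
+     * <pre>{@code
+     * var responseStream = activeTasks.acquireResponseStream(taskId);
+     * try {
+     *     responseStream.writeChunk(chunk, listener); // hypothetical use of the acquired stream
+     * } finally {
+     *     responseStream.decRef(); // release the reference taken by acquireResponseStream
+     * }
+     * }</pre>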
+ * @throws ResourceNotFoundException if the task is not running or its refcount already reached zero (likely because it completed) + */ + RepositoryVerifyIntegrityResponseStream acquireResponseStream(long taskId) { + final var outerRequest = responseStreamsByCoordinatingTaskId.get(taskId); + if (outerRequest == null || outerRequest.tryIncRef() == false) { + throw new ResourceNotFoundException("verify task [" + taskId + "] not found"); + } + return outerRequest; + } +} diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/IndexDescription.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/IndexDescription.java new file mode 100644 index 0000000000000..e13d970346868 --- /dev/null +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/IndexDescription.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.repositories.blobstore.testkit.integrity; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.repositories.IndexId; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; + +/** + * Details of an index in a specific snapshot, identifying its corresponding {@link org.elasticsearch.cluster.metadata.IndexMetadata} blob + * and the number of shards. + */ +public record IndexDescription(IndexId indexId, @Nullable String indexMetadataBlob, int shardCount) implements Writeable, ToXContentObject { + + public IndexDescription { + if (indexId == null || shardCount < 0) { + throw new IllegalArgumentException("invalid IndexDescription"); + } + } + + public IndexDescription(StreamInput in) throws IOException { + this(new IndexId(in), in.readOptionalString(), in.readVInt()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + indexId.writeTo(out); + out.writeOptionalString(indexMetadataBlob); + out.writeVInt(shardCount); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("name", indexId.getName()); + builder.field("uuid", indexId.getId()); + if (indexMetadataBlob != null) { + builder.field("metadata_blob", indexMetadataBlob); + } + if (shardCount > 0) { + builder.field("shards", shardCount); + } + return builder.endObject(); + } +} diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryIntegrityVerifier.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryIntegrityVerifier.java new file mode 100644 index 0000000000000..a5c81d18071fc --- /dev/null +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryIntegrityVerifier.java @@ -0,0 +1,949 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.repositories.blobstore.testkit.integrity; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.RateLimiter; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; +import org.elasticsearch.action.support.RefCountingListener; +import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.common.blobstore.OperationPurpose; +import org.elasticsearch.common.blobstore.support.BlobMetadata; +import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.CancellableThreads; +import org.elasticsearch.common.util.Maps; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.ThrottledIterator; +import org.elasticsearch.common.util.concurrent.ThrottledTaskRunner; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Strings; +import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot; +import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshots; +import org.elasticsearch.index.snapshots.blobstore.RateLimitingInputStream; +import org.elasticsearch.index.snapshots.blobstore.SlicedInputStream; +import org.elasticsearch.repositories.IndexId; +import org.elasticsearch.repositories.RepositoryData; +import org.elasticsearch.repositories.RepositoryVerificationException; +import org.elasticsearch.repositories.ShardGeneration; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.SnapshotInfo; +import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.threadpool.ThreadPool; + +import java.io.EOFException; +import java.io.IOException; +import java.io.InputStream; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; +import java.util.function.BiConsumer; +import java.util.function.BooleanSupplier; +import java.util.function.LongSupplier; + +import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMap; + +class RepositoryIntegrityVerifier { + private static final Logger logger = LogManager.getLogger(RepositoryIntegrityVerifier.class); + + private final LongSupplier currentTimeMillisSupplier; + private final BlobStoreRepository blobStoreRepository; + private final RepositoryVerifyIntegrityResponseChunk.Writer responseChunkWriter; + private final String repositoryName; + private final RepositoryVerifyIntegrityParams requestParams; + private final RepositoryData repositoryData; + private final BooleanSupplier isCancelledSupplier; + private final CancellableRunner metadataTaskRunner; + private final CancellableRunner snapshotTaskRunner; + private final RateLimiter rateLimiter; + + private final Set 
unreadableSnapshotInfoUuids = ConcurrentCollections.newConcurrentSet(); + private final long snapshotCount; + private final AtomicLong snapshotProgress = new AtomicLong(); + private final long indexCount; + private final AtomicLong indexProgress = new AtomicLong(); + private final long indexSnapshotCount; + private final AtomicLong indexSnapshotProgress = new AtomicLong(); + private final AtomicLong blobsVerified = new AtomicLong(); + private final AtomicLong blobBytesVerified = new AtomicLong(); + private final AtomicLong throttledNanos; + private final AtomicLong failedShardSnapshotsCount = new AtomicLong(); + private final Set failedShardSnapshotDescriptions = ConcurrentCollections.newConcurrentSet(); + + RepositoryIntegrityVerifier( + LongSupplier currentTimeMillisSupplier, + BlobStoreRepository blobStoreRepository, + RepositoryVerifyIntegrityResponseChunk.Writer responseChunkWriter, + RepositoryVerifyIntegrityParams requestParams, + RepositoryData repositoryData, + CancellableThreads cancellableThreads + ) { + this.currentTimeMillisSupplier = currentTimeMillisSupplier; + this.blobStoreRepository = blobStoreRepository; + this.repositoryName = blobStoreRepository.getMetadata().name(); + this.responseChunkWriter = responseChunkWriter; + this.requestParams = requestParams; + this.repositoryData = repositoryData; + this.isCancelledSupplier = cancellableThreads::isCancelled; + this.snapshotTaskRunner = new CancellableRunner( + new ThrottledTaskRunner( + "verify-blob", + requestParams.blobThreadPoolConcurrency(), + blobStoreRepository.threadPool().executor(ThreadPool.Names.SNAPSHOT) + ), + cancellableThreads + ); + this.metadataTaskRunner = new CancellableRunner( + new ThrottledTaskRunner( + "verify-metadata", + requestParams.metaThreadPoolConcurrency(), + blobStoreRepository.threadPool().executor(ThreadPool.Names.SNAPSHOT_META) + ), + cancellableThreads + ); + + this.snapshotCount = repositoryData.getSnapshotIds().size(); + this.indexCount = repositoryData.getIndices().size(); + this.indexSnapshotCount = repositoryData.getIndexSnapshotCount(); + this.rateLimiter = new RateLimiter.SimpleRateLimiter(requestParams.maxBytesPerSec().getMbFrac()); + + this.throttledNanos = new AtomicLong(requestParams.verifyBlobContents() ? 
1 : 0); // nonzero if verifying so status reported + } + + RepositoryVerifyIntegrityTask.Status getStatus() { + return new RepositoryVerifyIntegrityTask.Status( + repositoryName, + repositoryData.getGenId(), + repositoryData.getUuid(), + snapshotCount, + snapshotProgress.get(), + indexCount, + indexProgress.get(), + indexSnapshotCount, + indexSnapshotProgress.get(), + blobsVerified.get(), + blobBytesVerified.get(), + throttledNanos.get() + ); + } + + void start(ActionListener listener) { + logger.info( + """ + [{}] verifying metadata integrity for index generation [{}]: \ + repo UUID [{}], cluster UUID [{}], snapshots [{}], indices [{}], index snapshots [{}]""", + repositoryName, + repositoryData.getGenId(), + repositoryData.getUuid(), + repositoryData.getClusterUUID(), + getSnapshotCount(), + getIndexCount(), + getIndexSnapshotCount() + ); + + SubscribableListener + // first verify the top-level properties of the snapshots + .newForked(this::verifySnapshots) + .andThen(this::checkFailedShardSnapshotCount) + // then verify the restorability of each index + .andThen(this::verifyIndices) + .andThenAccept(v -> this.ensureNotCancelled()) + // see if the repository data has changed + .andThen( + l -> blobStoreRepository.getRepositoryData(blobStoreRepository.threadPool().executor(ThreadPool.Names.MANAGEMENT), l) + ) + // log the completion and return the result + .addListener(new ActionListener<>() { + @Override + public void onResponse(RepositoryData finalRepositoryData) { + logger.info( + "[{}] completed verifying metadata integrity for index generation [{}]: repo UUID [{}], cluster UUID [{}]", + repositoryName, + repositoryData.getGenId(), + repositoryData.getUuid(), + repositoryData.getClusterUUID() + ); + listener.onResponse(new RepositoryVerifyIntegrityResponse(getStatus(), finalRepositoryData.getGenId())); + } + + @Override + public void onFailure(Exception e) { + logger.warn( + () -> Strings.format( + "[%s] failed verifying metadata integrity for index generation [%d]: repo UUID [%s], cluster UUID [%s]", + repositoryName, + repositoryData.getGenId(), + repositoryData.getUuid(), + repositoryData.getClusterUUID() + ), + e + ); + listener.onFailure(e); + } + }); + } + + private void ensureNotCancelled() { + if (isCancelledSupplier.getAsBoolean()) { + throw new TaskCancelledException("task cancelled"); + } + } + + private void verifySnapshots(ActionListener listener) { + new SnapshotsVerifier().run(listener); + } + + /** + * Verifies the top-level snapshot metadata in the repo, including {@link SnapshotInfo} and optional {@link Metadata} blobs. 
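+     * <p>For orientation, the top-level blobs in question are conventionally laid out as follows in a blob store
+     * repository root (an illustrative sketch, not an exhaustive listing):
+     * <pre>
+     * ${REPO}/index-${N}        - RepositoryData: the list of snapshots and indices
+     * ${REPO}/snap-${UUID}.dat  - one SnapshotInfo blob per snapshot
+     * ${REPO}/meta-${UUID}.dat  - the global Metadata blob, when global state is included
+     * </pre>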
+ */ + private class SnapshotsVerifier { + final Map> indexNamesBySnapshotName; + + SnapshotsVerifier() { + indexNamesBySnapshotName = Maps.newHashMapWithExpectedSize(repositoryData.getIndices().size()); + for (final var indexId : repositoryData.getIndices().values()) { + for (final var snapshotId : repositoryData.getSnapshots(indexId)) { + indexNamesBySnapshotName.computeIfAbsent(snapshotId.getName(), ignored -> new HashSet<>()).add(indexId.getName()); + } + } + } + + void run(ActionListener listener) { + var listeners = new RefCountingListener(listener); + runThrottled( + Iterators.failFast( + repositoryData.getSnapshotIds().iterator(), + () -> isCancelledSupplier.getAsBoolean() || listeners.isFailing() + ), + (releasable, snapshotId) -> new SnapshotVerifier(snapshotId).run( + ActionListener.assertOnce(ActionListener.releaseAfter(listeners.acquire(), releasable)) + ), + requestParams.snapshotVerificationConcurrency(), + snapshotProgress, + listeners + ); + } + + /** + * Verifies a single snapshot's metadata, including its {@link SnapshotInfo} and optional {@link Metadata} blobs. + */ + private class SnapshotVerifier { + private final SnapshotId snapshotId; + + SnapshotVerifier(SnapshotId snapshotId) { + this.snapshotId = snapshotId; + } + + void run(ActionListener listener) { + if (isCancelledSupplier.getAsBoolean()) { + // getSnapshotInfo does its own forking, so we must check for cancellation here + listener.onResponse(null); + return; + } + + blobStoreRepository.getSnapshotInfo(snapshotId, new ActionListener<>() { + @Override + public void onResponse(SnapshotInfo snapshotInfo) { + verifySnapshotInfo(snapshotInfo, listener); + } + + @Override + public void onFailure(Exception e) { + unreadableSnapshotInfoUuids.add(snapshotId.getUUID()); + anomaly("failed to load snapshot info").snapshotId(snapshotId).exception(e).write(listener); + } + }); + } + + void verifySnapshotInfo(SnapshotInfo snapshotInfo, ActionListener listener) { + final var chunkBuilder = new RepositoryVerifyIntegrityResponseChunk.Builder( + responseChunkWriter, + RepositoryVerifyIntegrityResponseChunk.Type.SNAPSHOT_INFO, + currentTimeMillisSupplier.getAsLong() + ).snapshotInfo(snapshotInfo); + + // record the SnapshotInfo in the response + final var chunkWrittenStep = SubscribableListener.newForked(chunkBuilder::write); + + if (failedShardSnapshotsCount.get() < requestParams.maxFailedShardSnapshots()) { + for (final var shardFailure : snapshotInfo.shardFailures()) { + if (failedShardSnapshotsCount.getAndIncrement() < requestParams.maxFailedShardSnapshots()) { + failedShardSnapshotDescriptions.add( + getShardSnapshotDescription(snapshotId, shardFailure.index(), shardFailure.shardId()) + ); + } + } + } else { + failedShardSnapshotsCount.addAndGet(snapshotInfo.shardFailures().size()); + } + + // check the indices in the SnapshotInfo match those in RepositoryData + final var snapshotContentsOkStep = chunkWrittenStep.andThen(l -> { + if (Set.copyOf(snapshotInfo.indices()).equals(indexNamesBySnapshotName.get(snapshotId.getName()))) { + l.onResponse(null); + } else { + anomaly("snapshot contents mismatch").snapshotId(snapshotId).write(l); + } + }); + + // check the global metadata is readable if present + final var globalMetadataOkStep = Boolean.TRUE.equals(snapshotInfo.includeGlobalState()) + ? 
snapshotContentsOkStep.andThen(this::verifySnapshotGlobalMetadata) + : snapshotContentsOkStep; + + globalMetadataOkStep.addListener(listener); + } + + private void verifySnapshotGlobalMetadata(ActionListener listener) { + metadataTaskRunner.run(ActionRunnable.wrap(listener, l -> { + try { + blobStoreRepository.getSnapshotGlobalMetadata(snapshotId); + // no checks here, loading it is enough + l.onResponse(null); + } catch (Exception e) { + anomaly("failed to load global metadata").snapshotId(snapshotId).exception(e).write(l); + } + })); + } + } + } + + private void checkFailedShardSnapshotCount(ActionListener listener) { + if (failedShardSnapshotDescriptions.size() < failedShardSnapshotsCount.get()) { + listener.onFailure( + new RepositoryVerificationException( + repositoryName, + Strings.format( + """ + Cannot verify the integrity of all index snapshots because this repository contains too many shard snapshot \ + failures: there are [%d] shard snapshot failures but [?%s] is set to [%d]. \ + Please increase this limit if it is safe to do so.""", + failedShardSnapshotsCount.get(), + RepositoryVerifyIntegrityParams.MAX_FAILED_SHARD_SNAPSHOTS, + requestParams.maxFailedShardSnapshots() + ) + ) + ); + } else { + listener.onResponse(null); + } + } + + private void verifyIndices(ActionListener listener) { + var listeners = new RefCountingListener(listener); + runThrottled( + Iterators.failFast( + repositoryData.getIndices().values().iterator(), + () -> isCancelledSupplier.getAsBoolean() || listeners.isFailing() + ), + (releasable, indexId) -> new IndexVerifier(indexId).run(ActionListener.releaseAfter(listeners.acquire(), releasable)), + requestParams.indexVerificationConcurrency(), + indexProgress, + listeners + ); + } + + /** + * Verifies the integrity of the snapshots of a specific index + */ + private class IndexVerifier { + private final IndexId indexId; + private final ShardContainerContentsDeduplicator shardContainerContentsDeduplicator = new ShardContainerContentsDeduplicator(); + private final IndexDescriptionsDeduplicator indexDescriptionsDeduplicator = new IndexDescriptionsDeduplicator(); + private final AtomicInteger totalSnapshotCounter = new AtomicInteger(); + private final AtomicInteger restorableSnapshotCounter = new AtomicInteger(); + + IndexVerifier(IndexId indexId) { + this.indexId = indexId; + } + + void run(ActionListener listener) { + SubscribableListener + + .newForked(l -> { + var listeners = new RefCountingListener(1, l); + runThrottled( + Iterators.failFast( + repositoryData.getSnapshots(indexId).iterator(), + () -> isCancelledSupplier.getAsBoolean() || listeners.isFailing() + ), + (releasable, snapshotId) -> verifyIndexSnapshot( + snapshotId, + ActionListener.releaseAfter(listeners.acquire(), releasable) + ), + requestParams.indexSnapshotVerificationConcurrency(), + indexSnapshotProgress, + listeners + ); + }) + .andThen(l -> { + ensureNotCancelled(); + new RepositoryVerifyIntegrityResponseChunk.Builder( + responseChunkWriter, + RepositoryVerifyIntegrityResponseChunk.Type.INDEX_RESTORABILITY, + currentTimeMillisSupplier.getAsLong() + ).indexRestorability(indexId, totalSnapshotCounter.get(), restorableSnapshotCounter.get()).write(l); + }) + .addListener(listener); + } + + private void verifyIndexSnapshot(SnapshotId snapshotId, ActionListener listener) { + totalSnapshotCounter.incrementAndGet(); + indexDescriptionsDeduplicator.get(snapshotId).andThen((l, indexDescription) -> { + if (indexDescription == null) { + // index metadata was unreadable; anomaly already reported, 
skip further verification of this index snapshot
+                    l.onResponse(null);
+                } else {
+                    new ShardSnapshotsVerifier(snapshotId, indexDescription).run(l);
+                }
+            }).addListener(listener);
+        }
+
+        /**
+         * Information about the contents of the {@code ${REPO}/indices/${INDEX}/${SHARD}/} container, shared across the verifications of
+         * each snapshot of this shard.
+         *
+         * @param shardId the numeric shard ID.
+         * @param blobsByName the {@link BlobMetadata} for every blob in the container, keyed by blob name.
+         * @param shardGeneration the current {@link ShardGeneration} for this shard, identifying the current {@code index-${UUID}} blob.
+         * @param filesByPhysicalNameBySnapshotName a {@link BlobStoreIndexShardSnapshot.FileInfo} for every tracked file, keyed by snapshot
+         *                                          name and then by the file's physical name.
+         * @param blobContentsListeners a threadsafe mutable map, keyed by file name, for every tracked file that the verification process
+         *                              encounters. Used to avoid double-counting the size of any files, and also to deduplicate work to
+         *                              verify their contents if {@code ?verify_blob_contents} is set.
+         */
+        private record ShardContainerContents(
+            int shardId,
+            Map<String, BlobMetadata> blobsByName,
+            @Nullable /* if shard gen is not defined */
+            ShardGeneration shardGeneration,
+            @Nullable /* if shard gen blob could not be read */
+            Map<String, Map<String, BlobStoreIndexShardSnapshot.FileInfo>> filesByPhysicalNameBySnapshotName,
+            Map<String, SubscribableListener<Void>> blobContentsListeners
+        ) {}
+
+        /**
+         * Verifies the integrity of the shard snapshots of a specific index snapshot
+         */
+        private class ShardSnapshotsVerifier {
+            private final SnapshotId snapshotId;
+            private final IndexDescription indexDescription;
+            private final AtomicInteger restorableShardCount = new AtomicInteger();
+
+            ShardSnapshotsVerifier(SnapshotId snapshotId, IndexDescription indexDescription) {
+                this.snapshotId = snapshotId;
+                this.indexDescription = indexDescription;
+            }
+
+            void run(ActionListener<Void> listener) {
+                try (var listeners = new RefCountingListener(1, listener.map(v -> {
+                    if (unreadableSnapshotInfoUuids.contains(snapshotId.getUUID()) == false
+                        && indexDescription.shardCount() == restorableShardCount.get()) {
+                        restorableSnapshotCounter.incrementAndGet();
+                    }
+                    return v;
+                }))) {
+                    for (int shardId = 0; shardId < indexDescription.shardCount(); shardId++) {
+                        if (failedShardSnapshotDescriptions.contains(getShardSnapshotDescription(snapshotId, indexId.getName(), shardId))) {
+                            continue;
+                        }
+
+                        shardContainerContentsDeduplicator.get(shardId)
+                            // deduplicating reads of shard container contents
+                            .andThen((l, shardContainerContents) -> {
+                                if (shardContainerContents == null) {
+                                    // shard container contents was unreadable; anomaly already reported, skip further verification
+                                    l.onResponse(null);
+                                } else {
+                                    new ShardSnapshotVerifier(shardContainerContents).run(l);
+                                }
+                            })
+                            .addListener(listeners.acquire());
+                    }
+                }
+            }
+
+            /**
+             * Verifies the integrity of a specific shard snapshot
+             */
+            private class ShardSnapshotVerifier {
+                private final ShardContainerContents shardContainerContents;
+                private volatile boolean isRestorable = true;
+
+                ShardSnapshotVerifier(ShardContainerContents shardContainerContents) {
+                    this.shardContainerContents = shardContainerContents;
+                }
+
+                void run(ActionListener<Void> listener) {
+                    metadataTaskRunner.run(ActionRunnable.wrap(listener, this::verifyShardSnapshot));
+                }
+
+                private void verifyShardSnapshot(ActionListener<Void> listener) {
+                    final var shardId = shardContainerContents.shardId();
+                    final BlobStoreIndexShardSnapshot blobStoreIndexShardSnapshot;
+                    try {
+                        blobStoreIndexShardSnapshot =
blobStoreRepository.loadShardSnapshot( + blobStoreRepository.shardContainer(indexId, shardId), + snapshotId + ); + } catch (Exception e) { + anomaly("failed to load shard snapshot").snapshotId(snapshotId) + .shardDescription(indexDescription, shardId) + .exception(e) + .write(listener); + return; + } + + final var listeners = new RefCountingListener(1, listener.map(v -> { + if (isRestorable) { + restorableShardCount.incrementAndGet(); + } + return v; + })); + final var shardGenerationConsistencyListener = listeners.acquire(); + + runThrottled( + Iterators.failFast( + blobStoreIndexShardSnapshot.indexFiles().iterator(), + () -> isCancelledSupplier.getAsBoolean() || listeners.isFailing() + ), + (releasable, fileInfo) -> verifyFileInfo(fileInfo, ActionListener.releaseAfter(listeners.acquire(), releasable)), + 1, + blobsVerified, + listeners + ); + + // NB this next step doesn't matter for restorability, it is just verifying that the shard gen blob matches the shard + // snapshot blob + verifyShardGenerationConsistency(blobStoreIndexShardSnapshot, shardGenerationConsistencyListener); + } + + /** + * Checks that the given {@link org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot.FileInfo} matches + * the actual blob in the repository. + */ + private void verifyFileInfo(BlobStoreIndexShardSnapshot.FileInfo fileInfo, ActionListener listener) { + if (fileInfo.metadata().hashEqualsContents()) { + listener.onResponse(null); + return; + } + + for (int partIndex = 0; partIndex < fileInfo.numberOfParts(); partIndex++) { + final var blobName = fileInfo.partName(partIndex); + final var blobInfo = shardContainerContents.blobsByName().get(blobName); + if (blobInfo == null) { + isRestorable = false; + String physicalFileName = fileInfo.physicalName(); + anomaly("missing blob").snapshotId(snapshotId) + .shardDescription(indexDescription, shardContainerContents.shardId()) + .blobName(blobName, physicalFileName) + .part(partIndex, fileInfo.numberOfParts()) + .fileLength(ByteSizeValue.ofBytes(fileInfo.length())) + .partLength(ByteSizeValue.ofBytes(fileInfo.partBytes(partIndex))) + .write(listener); + return; + } else if (blobInfo.length() != fileInfo.partBytes(partIndex)) { + isRestorable = false; + String physicalFileName = fileInfo.physicalName(); + ByteSizeValue blobLength = ByteSizeValue.ofBytes(blobInfo.length()); + anomaly("mismatched blob length").snapshotId(snapshotId) + .shardDescription(indexDescription, shardContainerContents.shardId()) + .blobName(blobName, physicalFileName) + .part(partIndex, fileInfo.numberOfParts()) + .fileLength(ByteSizeValue.ofBytes(fileInfo.length())) + .partLength(ByteSizeValue.ofBytes(fileInfo.partBytes(partIndex))) + .blobLength(blobLength) + .write(listener); + return; + } + } + + // NB adding a listener whether ?verify_blob_contents is set or not - we want to track the blob sizes either way + blobContentsListeners(indexDescription, shardContainerContents, fileInfo).addListener( + listener.delegateResponse((l, e) -> { + isRestorable = false; + String physicalFileName = fileInfo.physicalName(); + anomaly("corrupt data blob").snapshotId(snapshotId) + .shardDescription(indexDescription, shardContainerContents.shardId()) + .blobName(fileInfo.name(), physicalFileName) + .part(-1, fileInfo.numberOfParts()) + .fileLength(ByteSizeValue.ofBytes(fileInfo.length())) + .exception(e) + .write(l); + }) + ); + } + + /** + * Checks that the shard generation blob has the right content for this shard snapshot. 
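+             * <p>Conceptually this is a two-way comparison between the shard generation summary and the file list in the
+             * shard snapshot blob; a sketch of the logic implemented below, not additional behaviour:
+             * <pre>{@code
+             * for (var summaryFile : summaryFilesByPhysicalName.values()) {
+             *     // anomaly if the snapshot lacks the file, or has it but summaryFile.isSame(snapshotFile) == false
+             * }
+             * for (var snapshotFile : blobStoreIndexShardSnapshot.indexFiles()) {
+             *     // anomaly if the summary lacks the file
+             * }
+             * }</pre>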
+ */ + private void verifyShardGenerationConsistency( + BlobStoreIndexShardSnapshot blobStoreIndexShardSnapshot, + ActionListener listener + ) { + final var summaryFilesByPhysicalNameBySnapshotName = shardContainerContents.filesByPhysicalNameBySnapshotName(); + if (summaryFilesByPhysicalNameBySnapshotName == null) { + // couldn't read shard gen blob at all - already reported, nothing more to do here + listener.onResponse(null); + return; + } + + final var shardId = shardContainerContents.shardId(); + + final var summaryFilesByPhysicalName = summaryFilesByPhysicalNameBySnapshotName.get(snapshotId.getName()); + if (summaryFilesByPhysicalName == null) { + anomaly("snapshot not in shard generation").snapshotId(snapshotId) + .shardDescription(indexDescription, shardId) + .shardGeneration(shardContainerContents.shardGeneration()) + .write(listener); + return; + } + + final var snapshotFiles = getFilesByPhysicalName(blobStoreIndexShardSnapshot.indexFiles()); + + for (final var summaryFile : summaryFilesByPhysicalName.values()) { + final var snapshotFile = snapshotFiles.get(summaryFile.physicalName()); + if (snapshotFile == null) { + anomaly("blob in shard generation but not snapshot").snapshotId(snapshotId) + .shardDescription(indexDescription, shardId) + .shardGeneration(shardContainerContents.shardGeneration()) + .physicalFileName(summaryFile.physicalName()) + .write(listener); + return; + } else if (summaryFile.isSame(snapshotFile) == false) { + anomaly("snapshot shard generation mismatch").snapshotId(snapshotId) + .shardDescription(indexDescription, shardId) + .shardGeneration(shardContainerContents.shardGeneration()) + .physicalFileName(summaryFile.physicalName()) + .write(listener); + return; + } + } + + for (final var snapshotFile : blobStoreIndexShardSnapshot.indexFiles()) { + if (summaryFilesByPhysicalName.get(snapshotFile.physicalName()) == null) { + anomaly("blob in snapshot but not shard generation").snapshotId(snapshotId) + .shardDescription(indexDescription, shardId) + .shardGeneration(shardContainerContents.shardGeneration()) + .physicalFileName(snapshotFile.physicalName()) + .write(listener); + return; + } + } + + listener.onResponse(null); + } + } + } + + /** + * Exposes {@link IndexDescription} per index-metadata-blob (particularly the shard count), caching the value on first read + * to avoid duplicate work. + */ + private class IndexDescriptionsDeduplicator { + private final Map> listenersByBlobId = newConcurrentMap(); + + SubscribableListener get(SnapshotId snapshotId) { + final var indexMetaBlobId = repositoryData.indexMetaDataGenerations().indexMetaBlobId(snapshotId, indexId); + return listenersByBlobId.computeIfAbsent( + indexMetaBlobId, + ignored -> SubscribableListener.newForked( + indexDescriptionListener -> metadataTaskRunner.run( + ActionRunnable.wrap(indexDescriptionListener, l -> load(snapshotId, indexMetaBlobId, l)) + ) + ) + ); + } + + private void load(SnapshotId snapshotId, String indexMetaBlobId, ActionListener listener) { + try { + listener.onResponse( + new IndexDescription( + indexId, + indexMetaBlobId, + blobStoreRepository.getSnapshotIndexMetaData(repositoryData, snapshotId, indexId).getNumberOfShards() + ) + ); + } catch (Exception e) { + anomaly("failed to load index metadata").indexDescription(new IndexDescription(indexId, indexMetaBlobId, 0)) + .exception(e) + .write(listener.map(v -> null)); + } + } + } + + /** + * Exposes {@link ShardContainerContents} per shard, caching the value on the first read to avoid duplicate work. 
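+         * <p>The caching idiom, in miniature (a simplified sketch of the code below, which additionally routes the load
+         * through the metadata task runner): concurrent callers for the same shard subscribe to a single in-flight load
+         * rather than listing the shard container repeatedly:
+         * <pre>{@code
+         * listenersByShardId.computeIfAbsent(shardId, ignored -> SubscribableListener.newForked(l -> load(shardId, l)));
+         * }</pre>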
+ */ + private class ShardContainerContentsDeduplicator { + private final Map> listenersByShardId = newConcurrentMap(); + + SubscribableListener get(int shardId) { + return listenersByShardId.computeIfAbsent( + shardId, + ignored -> SubscribableListener.newForked( + shardContainerContentsListener -> metadataTaskRunner.run( + ActionRunnable.wrap(shardContainerContentsListener, l -> load(shardId, l)) + ) + ) + ); + } + + private void load(int shardId, ActionListener listener) { + final var indexDescription = new IndexDescription(indexId, null, 0); + + final Map blobsByName; + try { + blobsByName = blobStoreRepository.shardContainer(indexId, shardId).listBlobs(OperationPurpose.REPOSITORY_ANALYSIS); + } catch (Exception e) { + anomaly("failed to list shard container contents").shardDescription(new IndexDescription(indexId, null, 0), shardId) + .exception(e) + .write(listener.map(v -> null)); + return; + } + + final var shardGen = repositoryData.shardGenerations().getShardGen(indexId, shardId); + if (shardGen == null) { + anomaly("shard generation not defined").shardDescription(indexDescription, shardId) + .write( + listener.map( + // NB we don't need the shard gen to do most of the rest of the verification, so we set it to null and + // carry on: + v -> new ShardContainerContents(shardId, blobsByName, null, null, ConcurrentCollections.newConcurrentMap()) + ) + ); + return; + } + + SubscribableListener + // try and load the shard gen blob + .newForked(l -> { + try { + l.onResponse(blobStoreRepository.getBlobStoreIndexShardSnapshots(indexId, shardId, shardGen)); + } catch (Exception e) { + // failing here is not fatal to snapshot restores, only to creating/deleting snapshots, so we can return null + // and carry on with the analysis + anomaly("failed to load shard generation").shardDescription(indexDescription, shardId) + .shardGeneration(shardGen) + .exception(e) + .write(l.map(v -> null)); + } + }) + .andThenApply( + blobStoreIndexShardSnapshots -> new ShardContainerContents( + shardId, + blobsByName, + shardGen, + getFilesByPhysicalNameBySnapshotName(blobStoreIndexShardSnapshots), + ConcurrentCollections.newConcurrentMap() + ) + ) + .addListener(listener); + } + + private static Map> getFilesByPhysicalNameBySnapshotName( + BlobStoreIndexShardSnapshots blobStoreIndexShardSnapshots + ) { + if (blobStoreIndexShardSnapshots == null) { + return null; + } + + final Map> filesByPhysicalNameBySnapshotName = Maps + .newHashMapWithExpectedSize(blobStoreIndexShardSnapshots.snapshots().size()); + for (final var snapshotFiles : blobStoreIndexShardSnapshots.snapshots()) { + filesByPhysicalNameBySnapshotName.put(snapshotFiles.snapshot(), getFilesByPhysicalName(snapshotFiles.indexFiles())); + } + return filesByPhysicalNameBySnapshotName; + } + } + + private SubscribableListener blobContentsListeners( + IndexDescription indexDescription, + ShardContainerContents shardContainerContents, + BlobStoreIndexShardSnapshot.FileInfo fileInfo + ) { + return shardContainerContents.blobContentsListeners().computeIfAbsent(fileInfo.name(), ignored -> { + if (requestParams.verifyBlobContents()) { + return SubscribableListener.newForked(listener -> snapshotTaskRunner.run(ActionRunnable.run(listener, () -> { + try (var slicedStream = new SlicedInputStream(fileInfo.numberOfParts()) { + @Override + protected InputStream openSlice(int slice) throws IOException { + return blobStoreRepository.shardContainer(indexDescription.indexId(), shardContainerContents.shardId()) + .readBlob(OperationPurpose.REPOSITORY_ANALYSIS, 
fileInfo.partName(slice)); + } + }; + var rateLimitedStream = new RateLimitingInputStream(slicedStream, () -> rateLimiter, throttledNanos::addAndGet); + var indexInput = new IndexInputWrapper(rateLimitedStream, fileInfo.length()) + ) { + CodecUtil.checksumEntireFile(indexInput); + } + }))); + } else { + blobBytesVerified.addAndGet(fileInfo.length()); + return SubscribableListener.newSucceeded(null); + } + }); + } + } + + private static String getShardSnapshotDescription(SnapshotId snapshotId, String index, int shardId) { + return snapshotId.getUUID() + "/" + index + "/" + shardId; + } + + private static Map getFilesByPhysicalName( + List fileInfos + ) { + final Map filesByPhysicalName = Maps.newHashMapWithExpectedSize(fileInfos.size()); + for (final var fileInfo : fileInfos) { + filesByPhysicalName.put(fileInfo.physicalName(), fileInfo); + } + return filesByPhysicalName; + } + + private static void runThrottled( + Iterator iterator, + BiConsumer itemConsumer, + int maxConcurrency, + AtomicLong progressCounter, + Releasable onCompletion + ) { + ThrottledIterator.run(iterator, itemConsumer, maxConcurrency, progressCounter::incrementAndGet, onCompletion::close); + } + + private RepositoryVerifyIntegrityResponseChunk.Builder anomaly(String anomaly) { + return new RepositoryVerifyIntegrityResponseChunk.Builder( + responseChunkWriter, + RepositoryVerifyIntegrityResponseChunk.Type.ANOMALY, + currentTimeMillisSupplier.getAsLong() + ).anomaly(anomaly); + } + + public long getSnapshotCount() { + return snapshotCount; + } + + public long getIndexCount() { + return indexCount; + } + + public long getIndexSnapshotCount() { + return indexSnapshotCount; + } + + private class IndexInputWrapper extends IndexInput { + private final InputStream inputStream; + private final long length; + long filePointer = 0L; + + IndexInputWrapper(InputStream inputStream, long length) { + super(""); + this.inputStream = inputStream; + this.length = length; + } + + @Override + public byte readByte() throws IOException { + if (isCancelledSupplier.getAsBoolean()) { + throw new TaskCancelledException("task cancelled"); + } + final var read = inputStream.read(); + if (read == -1) { + throw new EOFException(); + } + filePointer += 1; + blobBytesVerified.incrementAndGet(); + return (byte) read; + } + + @Override + public void readBytes(byte[] b, int offset, int len) throws IOException { + while (len > 0) { + if (isCancelledSupplier.getAsBoolean()) { + throw new TaskCancelledException("task cancelled"); + } + final var read = inputStream.read(b, offset, len); + if (read == -1) { + throw new EOFException(); + } + filePointer += read; + blobBytesVerified.addAndGet(read); + len -= read; + offset += read; + } + } + + @Override + public void close() {} + + @Override + public long getFilePointer() { + return filePointer; + } + + @Override + public void seek(long pos) { + if (filePointer != pos) { + assert false : "cannot seek"; + throw new UnsupportedOperationException("seek"); + } + } + + @Override + public long length() { + return length; + } + + @Override + public IndexInput slice(String sliceDescription, long offset, long length) { + assert false; + throw new UnsupportedOperationException("slice"); + } + } + + private static class CancellableRunner { + private final ThrottledTaskRunner delegate; + private final CancellableThreads cancellableThreads; + + CancellableRunner(ThrottledTaskRunner delegate, CancellableThreads cancellableThreads) { + this.delegate = delegate; + this.cancellableThreads = cancellableThreads; + } + + void 
run(AbstractRunnable runnable) {
+            delegate.enqueueTask(new ActionListener<>() {
+                @Override
+                public void onResponse(Releasable releasable) {
+                    try (releasable) {
+                        if (cancellableThreads.isCancelled()) {
+                            runnable.onFailure(new TaskCancelledException("task cancelled"));
+                        } else {
+                            cancellableThreads.execute(runnable::run);
+                        }
+                    }
+                }
+
+                @Override
+                public void onFailure(Exception e) {
+                    runnable.onFailure(e);
+                }
+            });
+        }
+    }
+}
diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityParams.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityParams.java
new file mode 100644
index 0000000000000..61a58c0da8df0
--- /dev/null
+++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityParams.java
@@ -0,0 +1,135 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.repositories.blobstore.testkit.integrity;
+
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.threadpool.ThreadPool;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * Parameters of a repository-verify-integrity request.
+ *
+ * @param repository the name of the repository whose integrity to verify.
+ * @param metaThreadPoolConcurrency the number of concurrent tasks to execute on the {@link ThreadPool.Names#SNAPSHOT_META} pool, or
+ *                                  {@code 0} to use a sensible default.
+ * @param blobThreadPoolConcurrency the number of concurrent tasks to execute on the {@link ThreadPool.Names#SNAPSHOT} pool, or {@code 0}
+ *                                  to use a sensible default.
+ * @param snapshotVerificationConcurrency the number of snapshots to verify concurrently, or {@code 0} to use a sensible default.
+ * @param indexVerificationConcurrency the number of indices to verify concurrently, or {@code 0} to use a sensible default.
+ * @param indexSnapshotVerificationConcurrency the number of snapshots to verify concurrently for each index, or {@code 0} to use a
+ *                                             sensible default.
+ * @param maxFailedShardSnapshots the maximum number of shard snapshot failures to track - we must build a list of all of them in memory
+ *                                to avoid reporting spurious anomalies, and this can be overwhelming in a very broken repository.
+ * @param verifyBlobContents whether to verify the contents of each data blob (which is very expensive).
+ * @param maxBytesPerSec rate limit to use for blob contents verification.
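+ *
+ * <p>These parameters are parsed from the REST request; an illustrative invocation (the repository name and the value
+ * are examples only, and unset parameters fall back to the defaults resolved in {@code withResolvedDefaults}):
+ * <pre>
+ * POST /_snapshot/my-repository/_verify_integrity?snapshot_verification_concurrency=4
+ * </pre>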
+ */ +public record RepositoryVerifyIntegrityParams( + String repository, + int metaThreadPoolConcurrency, + int blobThreadPoolConcurrency, + int snapshotVerificationConcurrency, + int indexVerificationConcurrency, + int indexSnapshotVerificationConcurrency, + int maxFailedShardSnapshots, + boolean verifyBlobContents, + ByteSizeValue maxBytesPerSec +) implements Writeable { + + public static final String MAX_FAILED_SHARD_SNAPSHOTS = "max_failed_shard_snapshots"; + + public RepositoryVerifyIntegrityParams { + Objects.requireNonNull(repository, "repository"); + requireNonNegative("meta_thread_pool_concurrency", metaThreadPoolConcurrency); + requireNonNegative("blob_thread_pool_concurrency", blobThreadPoolConcurrency); + requireNonNegative("snapshot_verification_concurrency", snapshotVerificationConcurrency); + requireNonNegative("index_verification_concurrency", indexVerificationConcurrency); + requireNonNegative("index_snapshot_verification_concurrency", indexSnapshotVerificationConcurrency); + requireNonNegative(MAX_FAILED_SHARD_SNAPSHOTS, maxFailedShardSnapshots); + if (maxBytesPerSec.getBytes() < 1) { + throw new IllegalArgumentException("invalid rate limit"); + } + } + + private static void requireNonNegative(String name, int value) { + if (value < 0) { + throw new IllegalArgumentException("argument [" + name + "] must be at least [0]"); + } + } + + RepositoryVerifyIntegrityParams(RestRequest restRequest) { + this( + restRequest.param("repository"), + restRequest.paramAsInt("meta_thread_pool_concurrency", 0), + restRequest.paramAsInt("blob_thread_pool_concurrency", 0), + restRequest.paramAsInt("snapshot_verification_concurrency", 0), + restRequest.paramAsInt("index_verification_concurrency", 0), + restRequest.paramAsInt("index_snapshot_verification_concurrency", 0), + restRequest.paramAsInt(MAX_FAILED_SHARD_SNAPSHOTS, 0), + restRequest.paramAsBoolean("verify_blob_contents", false), + restRequest.paramAsSize("max_bytes_per_sec", ByteSizeValue.ofMb(40)) + ); + } + + RepositoryVerifyIntegrityParams(StreamInput in) throws IOException { + this( + in.readString(), + in.readVInt(), + in.readVInt(), + in.readVInt(), + in.readVInt(), + in.readVInt(), + in.readVInt(), + in.readBoolean(), + ByteSizeValue.readFrom(in) + ); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(repository); + out.writeVInt(metaThreadPoolConcurrency); + out.writeVInt(blobThreadPoolConcurrency); + out.writeVInt(snapshotVerificationConcurrency); + out.writeVInt(indexVerificationConcurrency); + out.writeVInt(indexSnapshotVerificationConcurrency); + out.writeVInt(maxFailedShardSnapshots); + out.writeBoolean(verifyBlobContents); + maxBytesPerSec.writeTo(out); + } + + public RepositoryVerifyIntegrityParams withResolvedDefaults(ThreadPool.Info metadataThreadPoolInfo) { + if (metaThreadPoolConcurrency > 0 + && blobThreadPoolConcurrency > 0 + && snapshotVerificationConcurrency > 0 + && indexVerificationConcurrency > 0 + && indexSnapshotVerificationConcurrency > 0 + && maxFailedShardSnapshots > 0) { + return this; + } + + final var maxThreads = Math.max(1, metadataThreadPoolInfo.getMax()); + final var halfMaxThreads = Math.max(1, maxThreads / 2); + return new RepositoryVerifyIntegrityParams( + repository, + metaThreadPoolConcurrency > 0 ? metaThreadPoolConcurrency : halfMaxThreads, + blobThreadPoolConcurrency > 0 ? blobThreadPoolConcurrency : 1, + snapshotVerificationConcurrency > 0 ? snapshotVerificationConcurrency : halfMaxThreads, + indexVerificationConcurrency > 0 ? 
indexVerificationConcurrency : maxThreads, + indexSnapshotVerificationConcurrency > 0 ? indexSnapshotVerificationConcurrency : 1, + maxFailedShardSnapshots > 0 ? maxFailedShardSnapshots : 10000, + verifyBlobContents, + maxBytesPerSec + ); + } +} diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityResponse.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityResponse.java new file mode 100644 index 0000000000000..eff6ed7eb465d --- /dev/null +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityResponse.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.repositories.blobstore.testkit.integrity; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +public class RepositoryVerifyIntegrityResponse extends ActionResponse { + private final RepositoryVerifyIntegrityTask.Status finalTaskStatus; + private final long finalRepositoryGeneration; + + RepositoryVerifyIntegrityResponse(RepositoryVerifyIntegrityTask.Status finalTaskStatus, long finalRepositoryGeneration) { + this.finalTaskStatus = finalTaskStatus; + this.finalRepositoryGeneration = finalRepositoryGeneration; + } + + RepositoryVerifyIntegrityResponse(StreamInput in) throws IOException { + finalRepositoryGeneration = in.readLong(); + finalTaskStatus = new RepositoryVerifyIntegrityTask.Status(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeLong(finalRepositoryGeneration); + finalTaskStatus.writeTo(out); + } + + public long finalRepositoryGeneration() { + return finalRepositoryGeneration; + } + + public RepositoryVerifyIntegrityTask.Status finalTaskStatus() { + return finalTaskStatus; + } + + public long originalRepositoryGeneration() { + return finalTaskStatus.repositoryGeneration(); + } +} diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityResponseChunk.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityResponseChunk.java new file mode 100644 index 0000000000000..90130811c1218 --- /dev/null +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityResponseChunk.java @@ -0,0 +1,355 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.repositories.blobstore.testkit.integrity; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.repositories.IndexId; +import org.elasticsearch.repositories.ShardGeneration; +import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.SnapshotInfo; +import org.elasticsearch.xcontent.ToXContentFragment; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +/** + * A chunk of response to be streamed to the waiting client. + * + * @param type indicates the type of this chunk. + * @param anomaly a textual description of the anomaly found, or {@code null} if this chunk does not describe an anomaly. + * @param snapshotId the ID of the snapshot to which this chunk pertains, or {@code null} if this chunk does not pertain to a particular + * snapshot. + * @param snapshotInfo the raw {@link SnapshotInfo} for the snapshot, or {@code null}. + * @param indexDescription information about the index to which this chunk pertains, or {@code null} if this chunk does not pertain to + * a particular index. + * @param shardId the ID of the shard to which this chunk pertains, or {@code -1} if this chunk does not pertain to a particular shard. + * @param shardGeneration the {@link ShardGeneration} for the given shard, or {@code null} if not relevant. + * @param blobName the name of the blob to which this chunk pertains, or {@code null} if this chunk does not pertain to a particular blob. + * @param physicalFileName the name of the Lucene file to which this chunk pertains, or {@code null} if this chunk does not pertain to a + * particular Lucene file. + * @param partIndex the index of the part of the file represented by the blob to which this chunk pertains, or {@code -1} if this chunk does + * not pertain to a particular part. + * @param partCount the number of parts into which the file to which this chunk pertains is divided, or {@code -1} if not applicable. + * @param fileLength the length of the Lucene file to which this chunk pertains, or {@link ByteSizeValue#MINUS_ONE} if not applicable. + * @param partLength the length of the file part to which this chunk pertains, or {@link ByteSizeValue#MINUS_ONE} if not applicable. + * @param blobLength the length of the blob to which this chunk pertains, or {@link ByteSizeValue#MINUS_ONE} if not applicable. + * @param totalSnapshotCount the total number of snapshots which involve the index to which this chunk pertains, or {@code -1} if not + * applicable. + * @param restorableSnapshotCount the number of restorable snapshots which involve the index to which this chunk pertains, or {@code -1} if + * not applicable. + * @param exception an exception which relates to the failure described by this chunk, or {@code null} if not applicable. 
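+ *
+ * <p>Rendered to XContent, an {@code ANOMALY} chunk might look like the following (an illustrative example; the exact
+ * fields emitted depend on which of the components above are populated, see {@code toXContent}):
+ * <pre>
+ * {
+ *   "timestamp_in_millis": 1700000000000,
+ *   "anomaly": "missing blob",
+ *   "snapshot": { "snapshot": "snap-1", "uuid": "..." },
+ *   "shard_id": 0,
+ *   "blob_name": "__abc",
+ *   "part_index": 0,
+ *   "part_count": 1
+ * }
+ * </pre>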
+ */ +public record RepositoryVerifyIntegrityResponseChunk( + long timestampMillis, + Type type, + @Nullable String anomaly, + @Nullable SnapshotId snapshotId, + @Nullable SnapshotInfo snapshotInfo, + @Nullable IndexDescription indexDescription, + int shardId, + @Nullable ShardGeneration shardGeneration, + @Nullable String blobName, + @Nullable String physicalFileName, + int partIndex, + int partCount, + ByteSizeValue fileLength, + ByteSizeValue partLength, + ByteSizeValue blobLength, + int totalSnapshotCount, + int restorableSnapshotCount, + @Nullable Exception exception +) implements Writeable, ToXContentFragment { + + public enum Type { + /** + * The first chunk sent. Used to indicate that the verification has successfully started, and therefore we should start to send a + * 200 OK response to the client. + */ + START_RESPONSE, + + /** + * This chunk contains the raw {@link SnapshotInfo} for a snapshot. + */ + SNAPSHOT_INFO, + + /** + * This chunk contains information about the restorability of an index. + */ + INDEX_RESTORABILITY, + + /** + * This chunk describes an anomaly found during verification. + */ + ANOMALY, + } + + public RepositoryVerifyIntegrityResponseChunk { + if (fileLength == null + || partLength == null + || blobLength == null + || shardId < -1 + || partIndex < -1 + || partCount < -1 + || totalSnapshotCount < -1 + || restorableSnapshotCount < -1 + || (totalSnapshotCount >= 0 != restorableSnapshotCount >= 0)) { + throw new IllegalArgumentException("invalid: " + this); + } + } + + public RepositoryVerifyIntegrityResponseChunk(StreamInput in) throws IOException { + this( + in.readVLong(), + // TODO enum serialization tests + in.readEnum(Type.class), + in.readOptionalString(), + in.readOptionalWriteable(SnapshotId::new), + in.readOptionalWriteable(SnapshotInfo::readFrom), + in.readOptionalWriteable(IndexDescription::new), + in.readInt(), + in.readOptionalWriteable(ShardGeneration::new), + in.readOptionalString(), + in.readOptionalString(), + in.readInt(), + in.readInt(), + ByteSizeValue.readFrom(in), + ByteSizeValue.readFrom(in), + ByteSizeValue.readFrom(in), + in.readInt(), + in.readInt(), + in.readOptional(StreamInput::readException) + ); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVLong(timestampMillis); + out.writeEnum(type); + out.writeOptionalString(anomaly); + out.writeOptionalWriteable(snapshotId); + out.writeOptionalWriteable(snapshotInfo); + out.writeOptionalWriteable(indexDescription); + out.writeInt(shardId); + out.writeOptionalWriteable(shardGeneration); + out.writeOptionalString(blobName); + out.writeOptionalString(physicalFileName); + out.writeInt(partIndex); + out.writeInt(partCount); + fileLength.writeTo(out); + partLength.writeTo(out); + blobLength.writeTo(out); + out.writeInt(totalSnapshotCount); + out.writeInt(restorableSnapshotCount); + out.writeOptional(StreamOutput::writeException, exception); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.timeField("timestamp_in_millis", "timestamp", timestampMillis); + + if (anomaly() != null) { + builder.field("anomaly", anomaly()); + } + + if (snapshotInfo() != null) { + builder.field("snapshot"); + snapshotInfo().toXContentExternal(builder, params); + } else if (snapshotId() != null) { + builder.startObject("snapshot"); + builder.field("snapshot", snapshotId().getName()); + builder.field("uuid", snapshotId().getUUID()); + builder.endObject(); + } + + if (indexDescription() != null) { + 
builder.field("index", indexDescription(), params); + } + if (shardId() >= 0) { + builder.field("shard_id", shardId()); + } + if (shardGeneration() != null) { + builder.field("shard_generation", shardGeneration(), params); + } + if (blobName() != null) { + builder.field("blob_name", blobName()); + } + if (physicalFileName() != null) { + builder.field("physical_file_name", physicalFileName()); + } + if (partIndex() >= 0) { + builder.field("part_index", partIndex()); + } + if (partCount() >= 0) { + builder.field("part_count", partCount()); + } + if (fileLength() != ByteSizeValue.MINUS_ONE) { + builder.humanReadableField("file_length_in_bytes", "file_length", fileLength()); + } + if (partLength() != ByteSizeValue.MINUS_ONE) { + builder.humanReadableField("part_length_in_bytes", "part_length", partLength()); + } + if (blobLength() != ByteSizeValue.MINUS_ONE) { + builder.humanReadableField("blob_length_in_bytes", "blob_length", blobLength()); + } + if (totalSnapshotCount() >= 0 && restorableSnapshotCount() >= 0) { + builder.startObject("snapshot_restorability"); + builder.field("total_snapshot_count", totalSnapshotCount()); + builder.field("restorable_snapshot_count", restorableSnapshotCount()); + builder.endObject(); + } + if (exception() != null) { + builder.startObject("exception") + .value((bb, pp) -> ElasticsearchException.generateFailureXContent(bb, pp, exception(), true)) + .field("status", ExceptionsHelper.status(exception())) + .endObject(); + } + return builder; + } + + static class Builder { + private final Writer responseWriter; + private final Type type; + private final long timestampMillis; + + private String anomaly; + private SnapshotId snapshotId; + private SnapshotInfo snapshotInfo; + private IndexDescription indexDescription; + private int shardId = -1; + private ShardGeneration shardGeneration; + private String blobName; + private String physicalFileName; + private int partIndex = -1; + private int partCount = -1; + private ByteSizeValue fileLength = ByteSizeValue.MINUS_ONE; + private ByteSizeValue partLength = ByteSizeValue.MINUS_ONE; + private ByteSizeValue blobLength = ByteSizeValue.MINUS_ONE; + private int totalSnapshotCount = -1; + private int restorableSnapshotCount = -1; + private Exception exception; + + Builder(Writer responseWriter, Type type, long timestampMillis) { + this.responseWriter = responseWriter; + this.type = type; + this.timestampMillis = timestampMillis; + } + + Builder anomaly(String anomaly) { + this.anomaly = anomaly; + return this; + } + + Builder snapshotId(SnapshotId snapshotId) { + this.snapshotId = snapshotId; + return this; + } + + Builder snapshotInfo(SnapshotInfo snapshotInfo) { + this.snapshotInfo = snapshotInfo; + return this; + } + + Builder indexDescription(IndexDescription indexDescription) { + this.indexDescription = indexDescription; + return this; + } + + Builder shardDescription(IndexDescription indexDescription, int shardId) { + this.indexDescription = indexDescription; + this.shardId = shardId; + return this; + } + + Builder shardGeneration(ShardGeneration shardGeneration) { + this.shardGeneration = shardGeneration; + return this; + } + + Builder blobName(String blobName, String physicalFileName) { + this.blobName = blobName; + this.physicalFileName = physicalFileName; + return this; + } + + Builder physicalFileName(String physicalFileName) { + this.physicalFileName = physicalFileName; + return this; + } + + Builder part(int partIndex, int partCount) { + this.partIndex = partIndex; + this.partCount = partCount; + return this; + } 
+ + Builder fileLength(ByteSizeValue fileLength) { + this.fileLength = Objects.requireNonNull(fileLength); + return this; + } + + Builder partLength(ByteSizeValue partLength) { + this.partLength = Objects.requireNonNull(partLength); + return this; + } + + Builder blobLength(ByteSizeValue blobLength) { + this.blobLength = Objects.requireNonNull(blobLength); + return this; + } + + Builder indexRestorability(IndexId indexId, int totalSnapshotCount, int restorableSnapshotCount) { + this.indexDescription = new IndexDescription(indexId, null, 0); + this.totalSnapshotCount = totalSnapshotCount; + this.restorableSnapshotCount = restorableSnapshotCount; + return this; + } + + Builder exception(Exception exception) { + this.exception = exception; + return this; + } + + void write(ActionListener listener) { + responseWriter.writeResponseChunk( + new RepositoryVerifyIntegrityResponseChunk( + timestampMillis, + type, + anomaly, + snapshotId, + snapshotInfo, + indexDescription, + shardId, + shardGeneration, + blobName, + physicalFileName, + partIndex, + partCount, + fileLength, + partLength, + blobLength, + totalSnapshotCount, + restorableSnapshotCount, + exception + ), + ActionListener.assertOnce(listener) + ); + } + } + + interface Writer { + void writeResponseChunk(RepositoryVerifyIntegrityResponseChunk responseChunk, ActionListener listener); + } +} diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityResponseStream.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityResponseStream.java new file mode 100644 index 0000000000000..7ea9bfe6f2b23 --- /dev/null +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityResponseStream.java @@ -0,0 +1,151 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.repositories.blobstore.testkit.integrity; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.core.AbstractRefCounted; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.StreamingXContentResponse; + +import java.io.IOException; +import java.util.concurrent.atomic.AtomicLong; + +/** + * Represents a (possibly-streaming) response to the repository-verify-integrity API. 
+ */ +class RepositoryVerifyIntegrityResponseStream extends AbstractRefCounted { + // ref-counting discipline: + // - one ref added at creation in the REST layer and released there by the listener returned from getCompletionListener() + // - one ref held for every response chunk while it is being added to the fragment queue + // thus when all refs are released the transport-layer coordinating action is complete and no more trailing fragments can be added, + // so we can send the last response fragment. + + private static final Logger logger = LogManager.getLogger(RepositoryVerifyIntegrityResponseStream.class); + + private final RestChannel restChannel; + + private final SubscribableListener finalResultListener = new SubscribableListener<>(); + + // the listener exposed to the transport response handler + private final ActionListener completionListener = ActionListener.assertOnce( + ActionListener.releaseAfter(finalResultListener, this::decRef) + ); + + // set in startResponse() which completes before any calls to writeChunk() or closeInternal() so no need to be volatile + @Nullable // if not yet started + private StreamingXContentResponse streamingXContentResponse; + + private final AtomicLong anomalyCount = new AtomicLong(); + + RepositoryVerifyIntegrityResponseStream(RestChannel restChannel) { + this.restChannel = restChannel; + } + + void startResponse(Releasable releasable) throws IOException { + assert hasReferences(); + assert streamingXContentResponse == null; + streamingXContentResponse = new StreamingXContentResponse(restChannel, restChannel.request(), () -> {}); + streamingXContentResponse.writeFragment( + p0 -> ChunkedToXContentHelper.singleChunk((b, p) -> b.startObject().startArray("log")), + releasable + ); + } + + void writeChunk(RepositoryVerifyIntegrityResponseChunk chunk, Releasable releasable) { + assert hasReferences(); + assert streamingXContentResponse != null; + + if (chunk.type() == RepositoryVerifyIntegrityResponseChunk.Type.ANOMALY) { + anomalyCount.incrementAndGet(); + } + streamingXContentResponse.writeFragment( + p0 -> ChunkedToXContentHelper.singleChunk((b, p) -> b.startObject().value(chunk, p).endObject()), + releasable + ); + } + + @Override + protected void closeInternal() { + try { + assert finalResultListener.isDone(); + finalResultListener.addListener(new ActionListener<>() { + @Override + public void onResponse(RepositoryVerifyIntegrityResponse repositoryVerifyIntegrityResponse) { + // success - finish the response with the final results + assert streamingXContentResponse != null; + streamingXContentResponse.writeFragment( + p0 -> ChunkedToXContentHelper.singleChunk( + (b, p) -> b.endArray() + .startObject("results") + .field("status", repositoryVerifyIntegrityResponse.finalTaskStatus()) + .field("final_repository_generation", repositoryVerifyIntegrityResponse.finalRepositoryGeneration()) + .field("total_anomalies", anomalyCount.get()) + .field( + "result", + anomalyCount.get() == 0 + ? repositoryVerifyIntegrityResponse + .originalRepositoryGeneration() == repositoryVerifyIntegrityResponse.finalRepositoryGeneration() + ? 
"pass" + : "inconclusive due to concurrent writes" + : "fail" + ) + .endObject() + .endObject() + ), + () -> {} + ); + } + + @Override + public void onFailure(Exception e) { + if (streamingXContentResponse != null) { + // failure after starting the response - finish the response with a rendering of the final exception + streamingXContentResponse.writeFragment( + p0 -> ChunkedToXContentHelper.singleChunk( + (b, p) -> b.endArray() + .startObject("exception") + .value((bb, pp) -> ElasticsearchException.generateFailureXContent(bb, pp, e, true)) + .field("status", ExceptionsHelper.status(e)) + .endObject() + .endObject() + ), + () -> {} + ); + } else { + // didn't even get as far as starting to stream the response, must have hit an early exception (e.g. repo not found) + // so we can return this exception directly. + try { + restChannel.sendResponse(new RestResponse(restChannel, e)); + } catch (IOException e2) { + e.addSuppressed(e2); + logger.error("error building error response", e); + assert false : e; // shouldn't actually throw anything here + restChannel.request().getHttpChannel().close(); + } + } + } + }); + } finally { + Releasables.closeExpectNoException(streamingXContentResponse); + } + } + + public ActionListener getCompletionListener() { + return completionListener; + } +} diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityTask.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityTask.java new file mode 100644 index 0000000000000..eaae913fe9c6f --- /dev/null +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityTask.java @@ -0,0 +1,133 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.repositories.blobstore.testkit.integrity; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Map; +import java.util.Optional; +import java.util.function.Supplier; + +public class RepositoryVerifyIntegrityTask extends CancellableTask { + + private volatile Supplier statusSupplier; + + public RepositoryVerifyIntegrityTask( + long id, + String type, + String action, + String description, + TaskId parentTaskId, + Map headers + ) { + super(id, type, action, description, parentTaskId, headers); + } + + public void setStatusSupplier(Supplier statusSupplier) { + this.statusSupplier = statusSupplier; + } + + @Override + public Status getStatus() { + return Optional.ofNullable(statusSupplier).map(Supplier::get).orElse(null); + } + + public record Status( + String repositoryName, + long repositoryGeneration, + String repositoryUUID, + long snapshotCount, + long snapshotsVerified, + long indexCount, + long indicesVerified, + long indexSnapshotCount, + long indexSnapshotsVerified, + long blobsVerified, + long blobBytesVerified, + long throttledNanos + ) implements org.elasticsearch.tasks.Task.Status { + + public static String NAME = "verify_repository_integrity_status"; + + public Status(StreamInput in) throws IOException { + this( + in.readString(), + in.readVLong(), + in.readString(), + in.readVLong(), + in.readVLong(), + in.readVLong(), + in.readVLong(), + in.readVLong(), + in.readVLong(), + in.readVLong(), + in.readVLong(), + in.readVLong() + ); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(repositoryName); + out.writeVLong(repositoryGeneration); + out.writeString(repositoryUUID); + out.writeVLong(snapshotCount); + out.writeVLong(snapshotsVerified); + out.writeVLong(indexCount); + out.writeVLong(indicesVerified); + out.writeVLong(indexSnapshotCount); + out.writeVLong(indexSnapshotsVerified); + out.writeVLong(blobsVerified); + out.writeVLong(blobBytesVerified); + out.writeVLong(throttledNanos); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startObject("repository"); + builder.field("name", repositoryName); + builder.field("uuid", repositoryUUID); + builder.field("generation", repositoryGeneration); + builder.endObject(); + builder.startObject("snapshots"); + builder.field("verified", snapshotsVerified); + builder.field("total", snapshotCount); + builder.endObject(); + builder.startObject("indices"); + builder.field("verified", indicesVerified); + builder.field("total", indexCount); + builder.endObject(); + builder.startObject("index_snapshots"); + builder.field("verified", indexSnapshotsVerified); + builder.field("total", indexSnapshotCount); + builder.endObject(); + builder.startObject("blobs"); + builder.field("verified", blobsVerified); + if (throttledNanos > 0) { + builder.humanReadableField("verified_size_in_bytes", "verified_size", ByteSizeValue.ofBytes(blobBytesVerified)); + builder.humanReadableField("throttled_time_in_millis", "throttled_time", TimeValue.timeValueNanos(throttledNanos)); + } + builder.endObject(); + builder.endObject(); + return builder; + } + + 
@Override + public String getWriteableName() { + return NAME; + } + } +} diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RestRepositoryVerifyIntegrityAction.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RestRepositoryVerifyIntegrityAction.java new file mode 100644 index 0000000000000..16cdb9140411c --- /dev/null +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RestRepositoryVerifyIntegrityAction.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.repositories.blobstore.testkit.integrity; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestCancellableNodeClient; + +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.POST; + +@ServerlessScope(Scope.INTERNAL) +public class RestRepositoryVerifyIntegrityAction extends BaseRestHandler { + + public static final NodeFeature REPOSITORY_VERIFY_INTEGRITY_FEATURE = new NodeFeature("snapshot.repository_verify_integrity"); + + @Override + public List routes() { + return List.of(new Route(POST, "/_snapshot/{repository}/_verify_integrity")); + } + + @Override + public String getName() { + return "repository_verify_integrity"; + } + + @Override + public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) { + final var requestParams = new RepositoryVerifyIntegrityParams(request); + return channel -> { + final var responseStream = new RepositoryVerifyIntegrityResponseStream(channel); + new RestCancellableNodeClient(client, request.getHttpChannel()).execute( + TransportRepositoryVerifyIntegrityCoordinationAction.INSTANCE, + new TransportRepositoryVerifyIntegrityCoordinationAction.Request(requestParams, responseStream), + responseStream.getCompletionListener() + ); + }; + } +} diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/TransportRepositoryVerifyIntegrityAction.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/TransportRepositoryVerifyIntegrityAction.java new file mode 100644 index 0000000000000..aa29f83341317 --- /dev/null +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/TransportRepositoryVerifyIntegrityAction.java @@ -0,0 +1,158 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.repositories.blobstore.testkit.integrity; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.CancellableThreads; +import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.repositories.RepositoryData; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.Executor; +import java.util.function.LongSupplier; + +/** + * Transport action that actually runs the {@link RepositoryIntegrityVerifier} and sends response chunks back to the coordinating node. + */ +class TransportRepositoryVerifyIntegrityAction extends HandledTransportAction< + TransportRepositoryVerifyIntegrityAction.Request, + RepositoryVerifyIntegrityResponse> { + + // NB runs on the master because that's the expected place to read metadata blobs from the repository, but not an actual + // TransportMasterNodeAction since we don't want to retry on a master failover + + static final String ACTION_NAME = TransportRepositoryVerifyIntegrityCoordinationAction.INSTANCE.name() + "[m]"; + private final RepositoriesService repositoriesService; + private final TransportService transportService; + private final Executor executor; + + TransportRepositoryVerifyIntegrityAction( + TransportService transportService, + RepositoriesService repositoriesService, + ActionFilters actionFilters, + Executor executor + ) { + super(ACTION_NAME, transportService, actionFilters, TransportRepositoryVerifyIntegrityAction.Request::new, executor); + this.repositoriesService = repositoriesService; + this.transportService = transportService; + this.executor = executor; + } + + static class Request extends ActionRequest { + private final DiscoveryNode coordinatingNode; + private final long coordinatingTaskId; + private final RepositoryVerifyIntegrityParams requestParams; + + Request(DiscoveryNode coordinatingNode, long coordinatingTaskId, RepositoryVerifyIntegrityParams requestParams) { + this.coordinatingNode = coordinatingNode; + this.coordinatingTaskId = coordinatingTaskId; + this.requestParams = Objects.requireNonNull(requestParams); + } + + Request(StreamInput in) throws IOException { + super(in); + coordinatingNode = new DiscoveryNode(in); + coordinatingTaskId = in.readVLong(); + requestParams = new RepositoryVerifyIntegrityParams(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + coordinatingNode.writeTo(out); + out.writeVLong(coordinatingTaskId); + requestParams.writeTo(out); + } 
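+
+        // The request carries the coordinating node and its task ID so that the master node can route
+        // response chunks back to the node that holds the open REST channel.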
+ + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new RepositoryVerifyIntegrityTask(id, type, action, getDescription(), parentTaskId, headers); + } + } + + @Override + protected void doExecute(Task rawTask, Request request, ActionListener listener) { + final var responseWriter = new RepositoryVerifyIntegrityResponseChunk.Writer() { + + // no need to obtain a fresh connection each time - this connection shouldn't close, so if it does we can fail the verification + final Transport.Connection responseConnection = transportService.getConnection(request.coordinatingNode); + + @Override + public void writeResponseChunk(RepositoryVerifyIntegrityResponseChunk responseChunk, ActionListener listener) { + transportService.sendChildRequest( + responseConnection, + TransportRepositoryVerifyIntegrityResponseChunkAction.ACTION_NAME, + new TransportRepositoryVerifyIntegrityResponseChunkAction.Request(request.coordinatingTaskId, responseChunk), + rawTask, + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler( + listener.map(ignored -> null), + in -> ActionResponse.Empty.INSTANCE, + executor + ) + ); + } + }; + + final LongSupplier currentTimeMillisSupplier = transportService.getThreadPool()::absoluteTimeInMillis; + final var repository = (BlobStoreRepository) repositoriesService.repository(request.requestParams.repository()); + final var task = (RepositoryVerifyIntegrityTask) rawTask; + + SubscribableListener + + .newForked(l -> repository.getRepositoryData(executor, l)) + .andThenApply(repositoryData -> { + final var cancellableThreads = new CancellableThreads(); + task.addListener(() -> cancellableThreads.cancel("task cancelled")); + final var verifier = new RepositoryIntegrityVerifier( + currentTimeMillisSupplier, + repository, + responseWriter, + request.requestParams.withResolvedDefaults(repository.threadPool().info(ThreadPool.Names.SNAPSHOT_META)), + repositoryData, + cancellableThreads + ); + task.setStatusSupplier(verifier::getStatus); + return verifier; + }) + .andThen( + (l, repositoryIntegrityVerifier) -> new RepositoryVerifyIntegrityResponseChunk.Builder( + responseWriter, + RepositoryVerifyIntegrityResponseChunk.Type.START_RESPONSE, + currentTimeMillisSupplier.getAsLong() + ).write(l.map(ignored -> repositoryIntegrityVerifier)) + ) + .andThen((l, repositoryIntegrityVerifier) -> repositoryIntegrityVerifier.start(l)) + .addListener(listener); + } +} diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/TransportRepositoryVerifyIntegrityCoordinationAction.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/TransportRepositoryVerifyIntegrityCoordinationAction.java new file mode 100644 index 0000000000000..d5a5749997d8d --- /dev/null +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/TransportRepositoryVerifyIntegrityCoordinationAction.java @@ -0,0 +1,186 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.repositories.blobstore.testkit.integrity; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.discovery.MasterNotDiscoveredException; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportService; + +import java.util.Map; +import java.util.concurrent.Executor; + +/** + * Transport action that coordinates the integrity verification, dispatching a request to run the verification on the master and setting up + * the machinery needed to send the response chunks back to the client. + */ +public class TransportRepositoryVerifyIntegrityCoordinationAction extends TransportAction< + TransportRepositoryVerifyIntegrityCoordinationAction.Request, + RepositoryVerifyIntegrityResponse> { + + /* + * Message flow: the coordinating node (the one running this action) forwards the request on to a master node which actually runs the + * verification. The master node in turn sends requests back to this node containing chunks of response, either information about the + * snapshots processed, or about the restorability of the indices in the repository, or details of any verification anomalies found. + * When the process is complete the master responds to the original transport request with the final results: + * + * +---------+ +-------------+ +--------+ + * | Client | | Coordinator | | Master | + * +---------+ +-------------+ +--------+ + * | | | + * |-[REST request]--------------------->| | + * | |---[master node request]----------------->| ----------------------\ + * | | |-| Initialize verifier | + * | | | |---------------------| + * | |<--[START_RESPONSE chunk request]---------| + * |<---[headers & initial JSON body]----| | + * | |---[START_RESPONSE chunk response]------->| ------------------\ + * | | |-| Verify snapshot | + * | | | |-----------------| + * | |<--[SNAPSHOT_INFO chunk request]----------| + * |<---[more JSON body]-----------------| | + * | |---[SNAPSHOT_INFO chunk response]-------->| ------------------\ + * | | |-| Verify snapshot | + * | | | |-----------------| + * | |<--[SNAPSHOT_INFO chunk request]----------| + * |<---[more JSON body]-----------------| | + * | |---[SNAPSHOT_INFO chunk response]-------->| ... + * . . . + * . . . + * | | | -----------------------------\ + * | | |-| Verify index restorability | + * | | | |----------------------------| + * | |<--[INDEX_RESTORABILITY chunk request]----| + * |<---[more JSON body]-----------------| | + * | |---[INDEX_RESTORABILITY chunk response]-->| -----------------------------\ + * | | |-| Verify index restorability | + * | | | |----------------------------| + * | |<--[INDEX_RESTORABILITY chunk request]----| + * |<---[more JSON body]-----------------| | + * | |---[INDEX_RESTORABILITY chunk response]-->| ... + * . . . + * . . . 
+ * | |<--[response to master node request]------| + * |<--[final JSON to complete body]-----| | + * + * This message flow ties the lifecycle of the verification process to that of the transport request sent from coordinator to master, + * which means it integrates well with the tasks framework and handles network issues properly. An alternative would be for the + * coordinator to repeatedly request chunks from the master, but that would mean that there's no one task representing the whole + * process, and it'd be a little tricky for the master node to know if the coordinator has failed and the verification should be + * cancelled. + */ + + public static final ActionType INSTANCE = new ActionType<>( + "cluster:admin/repository/verify_integrity" + ); + + private final ActiveRepositoryVerifyIntegrityTasks activeRepositoryVerifyIntegrityTasks = new ActiveRepositoryVerifyIntegrityTasks(); + + private final TransportService transportService; + private final ClusterService clusterService; + private final Executor managementExecutor; + + public static class Request extends ActionRequest { + private final RepositoryVerifyIntegrityParams requestParams; + private final RepositoryVerifyIntegrityResponseStream responseStream; + + public Request(RepositoryVerifyIntegrityParams requestParams, RepositoryVerifyIntegrityResponseStream responseStream) { + this.requestParams = requestParams; + this.responseStream = responseStream; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + public RepositoryVerifyIntegrityParams requestParams() { + return requestParams; + } + + public RepositoryVerifyIntegrityResponseStream responseStream() { + return responseStream; + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, getDescription(), parentTaskId, headers); + } + } + + @Inject + public TransportRepositoryVerifyIntegrityCoordinationAction( + TransportService transportService, + ClusterService clusterService, + RepositoriesService repositoriesService, + ActionFilters actionFilters + ) { + super( + INSTANCE.name(), + actionFilters, + transportService.getTaskManager(), + transportService.getThreadPool().executor(ThreadPool.Names.MANAGEMENT) + ); + + this.transportService = transportService; + this.clusterService = clusterService; + this.managementExecutor = transportService.getThreadPool().executor(ThreadPool.Names.MANAGEMENT); + + // register subsidiary actions + new TransportRepositoryVerifyIntegrityAction(transportService, repositoriesService, actionFilters, managementExecutor); + + new TransportRepositoryVerifyIntegrityResponseChunkAction( + transportService, + actionFilters, + managementExecutor, + activeRepositoryVerifyIntegrityTasks + ); + } + + @Override + protected void doExecute(Task task, Request request, ActionListener listener) { + ActionListener.run( + ActionListener.releaseAfter( + listener, + activeRepositoryVerifyIntegrityTasks.registerResponseBuilder(task.getId(), request.responseStream()) + ), + l -> { + final var master = clusterService.state().nodes().getMasterNode(); + if (master == null) { + // no waiting around or retries here, we just fail immediately + throw new MasterNotDiscoveredException(); + } + transportService.sendChildRequest( + master, + TransportRepositoryVerifyIntegrityAction.ACTION_NAME, + new TransportRepositoryVerifyIntegrityAction.Request( + transportService.getLocalNode(), + task.getId(), + request.requestParams() + ), + task, 
+ TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>(l, RepositoryVerifyIntegrityResponse::new, managementExecutor) + ); + } + ); + } +} diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/TransportRepositoryVerifyIntegrityResponseChunkAction.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/TransportRepositoryVerifyIntegrityResponseChunkAction.java new file mode 100644 index 0000000000000..9015866fb3ec2 --- /dev/null +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/TransportRepositoryVerifyIntegrityResponseChunkAction.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.repositories.blobstore.testkit.integrity; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.Objects; +import java.util.concurrent.Executor; + +/** + * Transport action that handles a response chunk on the coordinating node, sending it out to the REST client. 
+ */
+class TransportRepositoryVerifyIntegrityResponseChunkAction extends HandledTransportAction<
+    TransportRepositoryVerifyIntegrityResponseChunkAction.Request,
+    ActionResponse.Empty> {
+
+    static final String ACTION_NAME = TransportRepositoryVerifyIntegrityCoordinationAction.INSTANCE.name() + "[response_chunk]";
+
+    private final ActiveRepositoryVerifyIntegrityTasks activeRepositoryVerifyIntegrityTasks;
+
+    TransportRepositoryVerifyIntegrityResponseChunkAction(
+        TransportService transportService,
+        ActionFilters actionFilters,
+        Executor executor,
+        ActiveRepositoryVerifyIntegrityTasks activeRepositoryVerifyIntegrityTasks
+    ) {
+        super(ACTION_NAME, transportService, actionFilters, Request::new, executor);
+        this.activeRepositoryVerifyIntegrityTasks = activeRepositoryVerifyIntegrityTasks;
+    }
+
+    static class Request extends ActionRequest {
+        private final long coordinatingTaskId;
+        private final RepositoryVerifyIntegrityResponseChunk chunkContents;
+
+        Request(long coordinatingTaskId, RepositoryVerifyIntegrityResponseChunk chunkContents) {
+            this.coordinatingTaskId = coordinatingTaskId;
+            this.chunkContents = Objects.requireNonNull(chunkContents);
+        }
+
+        Request(StreamInput in) throws IOException {
+            super(in);
+            coordinatingTaskId = in.readVLong();
+            chunkContents = new RepositoryVerifyIntegrityResponseChunk(in);
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            super.writeTo(out);
+            out.writeVLong(coordinatingTaskId);
+            chunkContents.writeTo(out);
+        }
+
+        @Override
+        public ActionRequestValidationException validate() {
+            return null;
+        }
+
+        public RepositoryVerifyIntegrityResponseChunk chunkContents() {
+            return chunkContents;
+        }
+    }
+
+    @Override
+    protected void doExecute(Task task, Request request, ActionListener<ActionResponse.Empty> listener) {
+        ActionListener.run(listener, l -> {
+            final var responseStream = activeRepositoryVerifyIntegrityTasks.acquireResponseStream(request.coordinatingTaskId);
+            try {
+                if (request.chunkContents().type() == RepositoryVerifyIntegrityResponseChunk.Type.START_RESPONSE) {
+                    responseStream.startResponse(() -> l.onResponse(ActionResponse.Empty.INSTANCE));
+                } else {
+                    responseStream.writeChunk(request.chunkContents(), () -> l.onResponse(ActionResponse.Empty.INSTANCE));
+                }
+            } finally {
+                responseStream.decRef();
+            }
+        });
+    }
+}
diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/x-pack/plugin/snapshot-repo-test-kit/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification
new file mode 100644
index 0000000000000..ae11c3bb39d0b
--- /dev/null
+++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification
@@ -0,0 +1,8 @@
+#
+# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+# or more contributor license agreements. Licensed under the Elastic License
+# 2.0; you may not use this file except in compliance with the Elastic License
+# 2.0.
+#
+
+org.elasticsearch.repositories.blobstore.testkit.SnapshotRepositoryTestKitFeatures

From 75c0fb9488a4e9cdc4df93cdca5782d827ba82c1 Mon Sep 17 00:00:00 2001
From: Benjamin Trent
Date: Wed, 11 Sep 2024 10:15:56 -0400
Subject: [PATCH 28/58] JSON parse failures should be 4xx codes (#112703)

It seemed that if there wasn't any text to parse, this was not an internal
issue but instead an argument issue, so I simply changed the exception
thrown.
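As a rough illustration of the effect (a hedged sketch, not code from this
change — it only demonstrates how the existing `ExceptionsHelper.status`
utility, which the REST layer uses, derives an HTTP status from the exception
type; `ParseFailureStatusSketch` is a made-up class name):

```
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.rest.RestStatus;

public class ParseFailureStatusSketch {
    public static void main(String[] args) {
        // IllegalArgumentException is recognised as a client error and maps to 400 Bad Request...
        assert ExceptionsHelper.status(new IllegalArgumentException("no text to parse")) == RestStatus.BAD_REQUEST;
        // ...whereas IllegalStateException has no special mapping and falls through to 500.
        assert ExceptionsHelper.status(new IllegalStateException("no text to parse")) == RestStatus.INTERNAL_SERVER_ERROR;
    }
}
```

(Run with `-ea` to enable the assertions.)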
If we don't agree with this, I can adjust `query` parsing directly, but this seemed like the better choice. closes: https://github.com/elastic/elasticsearch/issues/112296 --- docs/changelog/112703.yaml | 5 +++++ .../xcontent/provider/json/JsonXContentParser.java | 2 +- .../elasticsearch/index/mapper/IpFieldMapper.java | 12 ++---------- .../org/elasticsearch/common/ReferenceDocsTests.java | 2 +- .../index/query/MatchQueryBuilderTests.java | 2 +- 5 files changed, 10 insertions(+), 13 deletions(-) create mode 100644 docs/changelog/112703.yaml diff --git a/docs/changelog/112703.yaml b/docs/changelog/112703.yaml new file mode 100644 index 0000000000000..a428e8c4e2339 --- /dev/null +++ b/docs/changelog/112703.yaml @@ -0,0 +1,5 @@ +pr: 112703 +summary: JSON parse failures should be 4xx codes +area: Infra/Core +type: bug +issues: [] diff --git a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentParser.java b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentParser.java index c59f003d9cb04..63191084ca837 100644 --- a/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentParser.java +++ b/libs/x-content/impl/src/main/java/org/elasticsearch/xcontent/provider/json/JsonXContentParser.java @@ -111,7 +111,7 @@ public String text() throws IOException { } private void throwOnNoText() { - throw new IllegalStateException("Can't get text on a " + currentToken() + " at " + getTokenLocation()); + throw new IllegalArgumentException("Expected text at " + getTokenLocation() + " but found " + currentToken()); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index 062e7551a53c9..638af1a105328 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ -42,7 +42,6 @@ import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; import org.elasticsearch.search.lookup.FieldValues; import org.elasticsearch.search.lookup.SearchLookup; -import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.net.InetAddress; @@ -545,8 +544,9 @@ protected String contentType() { @Override protected void parseCreateField(DocumentParserContext context) throws IOException { InetAddress address; + String value = context.parser().textOrNull(); try { - address = value(context.parser(), nullValue); + address = value == null ? 
nullValue : InetAddresses.forString(value); } catch (IllegalArgumentException e) { if (ignoreMalformed) { context.addIgnoredField(fieldType().name()); @@ -564,14 +564,6 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio } } - private static InetAddress value(XContentParser parser, InetAddress nullValue) throws IOException { - String value = parser.textOrNull(); - if (value == null) { - return nullValue; - } - return InetAddresses.forString(value); - } - private void indexValue(DocumentParserContext context, InetAddress address) { if (dimension) { context.getDimensions().addIp(fieldType().name(), address).validate(context.indexSettings()); diff --git a/server/src/test/java/org/elasticsearch/common/ReferenceDocsTests.java b/server/src/test/java/org/elasticsearch/common/ReferenceDocsTests.java index 0fabf78017304..49208f2341701 100644 --- a/server/src/test/java/org/elasticsearch/common/ReferenceDocsTests.java +++ b/server/src/test/java/org/elasticsearch/common/ReferenceDocsTests.java @@ -66,7 +66,7 @@ public void testResourceValidation() throws Exception { builder.startObject("UNEXPECTED").endObject().endObject(); try (var stream = BytesReference.bytes(builder).streamInput()) { - expectThrows(IllegalStateException.class, () -> ReferenceDocs.readLinksBySymbol(stream)); + expectThrows(IllegalArgumentException.class, () -> ReferenceDocs.readLinksBySymbol(stream)); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index 278d4ae505bdc..48e8f0ef11676 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -373,7 +373,7 @@ public void testParseFailsWithTermsArray() { "message1" : ["term1", "term2"] } }"""; - expectThrows(IllegalStateException.class, () -> parseQuery(json2)); + expectThrows(IllegalArgumentException.class, () -> parseQuery(json2)); } public void testExceptionUsingAnalyzerOnNumericField() { From 07329d7f6d4fd8e801050c5b6d5bd613c57ece02 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 11 Sep 2024 15:19:12 +0100 Subject: [PATCH 29/58] Handle null exception message in `TestCluster#wipe` (#112741) Fixing noise seen in an unrelated test failure. --- .../src/main/java/org/elasticsearch/test/TestCluster.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java index ea632599fedbf..3385cb1eb2e7e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestCluster.java @@ -36,6 +36,7 @@ import java.io.IOException; import java.net.InetSocketAddress; import java.util.ArrayList; +import java.util.Objects; import java.util.Random; import java.util.Set; @@ -91,7 +92,7 @@ public void wipe(Set excludeTemplates) { l.delegateResponse((ll, e) -> { // Ignore if action isn't registered, because data streams is a module and // if the delete action isn't registered then there no data streams to delete. 
- if (e.getMessage().startsWith("failed to find action") == false) { + if (Objects.requireNonNullElse(e.getMessage(), "").startsWith("failed to find action") == false) { ll.onFailure(e); } else { ll.onResponse(AcknowledgedResponse.TRUE); From eabea6f69a0cab754751ab2e6b2d3e1cd03dfbfd Mon Sep 17 00:00:00 2001 From: Parker Timmins Date: Wed, 11 Sep 2024 09:32:40 -0500 Subject: [PATCH 30/58] Add TaskManager to pluginServices (#112687) Provide access to the TaskManager in plugins so that a RemovedTaskListener can be registered. This will allow a plugin to monitor when submitted tasks complete. --- docs/changelog/112687.yaml | 5 +++++ .../main/java/org/elasticsearch/node/NodeConstruction.java | 3 ++- .../org/elasticsearch/node/PluginServiceInstances.java | 4 +++- server/src/main/java/org/elasticsearch/plugins/Plugin.java | 7 +++++++ 4 files changed, 17 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/112687.yaml diff --git a/docs/changelog/112687.yaml b/docs/changelog/112687.yaml new file mode 100644 index 0000000000000..dd079e1b700c4 --- /dev/null +++ b/docs/changelog/112687.yaml @@ -0,0 +1,5 @@ +pr: 112687 +summary: Add `TaskManager` to `pluginServices` +area: Infra/Metrics +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index eb9ef08b329ab..ea53882a22a01 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -855,7 +855,8 @@ private void construct( featureService, systemIndices, dataStreamGlobalRetentionSettings, - documentParsingProvider + documentParsingProvider, + taskManager ); Collection pluginComponents = pluginsService.flatMap(plugin -> { diff --git a/server/src/main/java/org/elasticsearch/node/PluginServiceInstances.java b/server/src/main/java/org/elasticsearch/node/PluginServiceInstances.java index 7c8775502fd64..74ae5936c9602 100644 --- a/server/src/main/java/org/elasticsearch/node/PluginServiceInstances.java +++ b/server/src/main/java/org/elasticsearch/node/PluginServiceInstances.java @@ -24,6 +24,7 @@ import org.elasticsearch.plugins.internal.DocumentParsingProvider; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; @@ -48,5 +49,6 @@ public record PluginServiceInstances( FeatureService featureService, SystemIndices systemIndices, DataStreamGlobalRetentionSettings dataStreamGlobalRetentionSettings, - DocumentParsingProvider documentParsingProvider + DocumentParsingProvider documentParsingProvider, + TaskManager taskManager ) implements Plugin.PluginServices {} diff --git a/server/src/main/java/org/elasticsearch/plugins/Plugin.java b/server/src/main/java/org/elasticsearch/plugins/Plugin.java index a8bfda54b0646..4441ecadc3e8e 100644 --- a/server/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -31,6 +31,7 @@ import org.elasticsearch.plugins.internal.DocumentParsingProvider; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.telemetry.TelemetryProvider; import 
org.elasticsearch.threadpool.ExecutorBuilder;
 import org.elasticsearch.threadpool.ThreadPool;
 
@@ -165,6 +166,12 @@ public interface PluginServices {
      * A provider of utilities to observe and report parsing of documents
      */
     DocumentParsingProvider documentParsingProvider();
+
+    /**
+     * The task manager for the node. This should only be used by plugins
+     * to track task removal by registering a RemovedTaskListener.
+     */
+    TaskManager taskManager();
 }
 
 /**

From 051f504bd149243d742c47bd63937ab59860a8e2 Mon Sep 17 00:00:00 2001
From: Mary Gouseti
Date: Wed, 11 Sep 2024 17:45:42 +0300
Subject: [PATCH 31/58] Introduce data stream options and failure store configuration classes (#109515)

In order to facilitate enabling and disabling the failure store, and to
support component template composition, we introduce new metadata classes
that can support a more extensible failure store configuration.

We would like to introduce **data stream options**. Data stream options
capture the configuration of data-stream-level (smaller and larger) features,
such as the failure store and, in the future, the data stream lifecycle. They
are different from settings because they are applied at the data stream level
and not per backing index. This PR only sets up the basic classes, to enable
follow-up PRs that will actually use them.

Examples (these are not final; they are only meant to help visualise a
potential direction):

```
GET _data_stream/my-*/_options

{
  "data_streams": [
    {
      "name": "my-non-opinionated-ds",
      "options": { }
    },
    {
      "name": "my-fs",
      "options": {
        "failure_store": {
          "enabled": true
        }
      }
    },
    {
      "name": "my-no-fs",
      "options": {
        "failure_store": {
          "enabled": false
        }
      }
    }
  ]
}

// If we decide to add lifecycle here too:
PUT _data_stream/my-fs/_options
{
  "failure_store": {
    "enabled": true
  },
  "lifecycle": { }
}
```

What we see above are 3 data streams:
- `my-fs` with the failure store explicitly enabled
- `my-no-fs` with the failure store explicitly disabled, and
- `my-non-opinionated-ds` which does not specify what to do with the failure
  store, so for now that means the failure store is disabled, but this could
  change in the future.

Template composition examples are pending.
---
 .../metadata/DataStreamFailureStore.java      |  76 +++++++++++++++
 .../cluster/metadata/DataStreamOptions.java   |  93 +++++++++++++++++++
 .../metadata/DataStreamFailureStoreTests.java |  42 +++++++++
 .../metadata/DataStreamOptionsTests.java      |  44 +++++++++
 4 files changed, 255 insertions(+)
 create mode 100644 server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStore.java
 create mode 100644 server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamOptions.java
 create mode 100644 server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreTests.java
 create mode 100644 server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamOptionsTests.java

diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStore.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStore.java
new file mode 100644
index 0000000000000..d647956e752a3
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamFailureStore.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.cluster.Diff; +import org.elasticsearch.cluster.SimpleDiffable; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +/** + * Holds the data stream failure store metadata that enable or disable the failure store of a data stream. Currently, it + * supports the following configurations: + * - enabled + */ +public record DataStreamFailureStore(boolean enabled) implements SimpleDiffable, ToXContentObject { + + public static final ParseField ENABLED_FIELD = new ParseField("enabled"); + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "failure_store", + false, + (args, unused) -> new DataStreamFailureStore(args[0] == null || (Boolean) args[0]) + ); + + static { + PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED_FIELD); + } + + public DataStreamFailureStore() { + this(true); + } + + public DataStreamFailureStore(StreamInput in) throws IOException { + this(in.readBoolean()); + } + + public static Diff readDiffFrom(StreamInput in) throws IOException { + return SimpleDiffable.readDiffFrom(DataStreamFailureStore::new, in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(enabled); + } + + @Override + public String toString() { + return Strings.toString(this, true, true); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(ENABLED_FIELD.getPreferredName(), enabled); + builder.endObject(); + return builder; + } + + public static DataStreamFailureStore fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } +} diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamOptions.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamOptions.java new file mode 100644 index 0000000000000..9c7d2a986fa48 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamOptions.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.cluster.Diff; +import org.elasticsearch.cluster.SimpleDiffable; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +/** + * Holds data stream dedicated configuration options such as failure store, (in the future lifecycle). Currently, it + * supports the following configurations: + * - failure store + */ +public record DataStreamOptions(@Nullable DataStreamFailureStore failureStore) + implements + SimpleDiffable, + ToXContentObject { + + public static final ParseField FAILURE_STORE_FIELD = new ParseField("failure_store"); + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "options", + false, + (args, unused) -> new DataStreamOptions((DataStreamFailureStore) args[0]) + ); + + static { + PARSER.declareField( + ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> DataStreamFailureStore.fromXContent(p), + FAILURE_STORE_FIELD, + ObjectParser.ValueType.OBJECT_OR_NULL + ); + } + + public DataStreamOptions() { + this(null); + } + + public static DataStreamOptions read(StreamInput in) throws IOException { + return new DataStreamOptions(in.readOptionalWriteable(DataStreamFailureStore::new)); + } + + @Nullable + public DataStreamFailureStore getFailureStore() { + return failureStore; + } + + public static Diff readDiffFrom(StreamInput in) throws IOException { + return SimpleDiffable.readDiffFrom(DataStreamOptions::read, in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalWriteable(failureStore); + } + + @Override + public String toString() { + return Strings.toString(this, true, true); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (failureStore != null) { + builder.field(FAILURE_STORE_FIELD.getPreferredName(), failureStore); + } + builder.endObject(); + return builder; + } + + public static DataStreamOptions fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } +} diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreTests.java new file mode 100644 index 0000000000000..f5334f903af6b --- /dev/null +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamFailureStoreTests.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +public class DataStreamFailureStoreTests extends AbstractXContentSerializingTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return DataStreamFailureStore::new; + } + + @Override + protected DataStreamFailureStore createTestInstance() { + return randomFailureStore(); + } + + @Override + protected DataStreamFailureStore mutateInstance(DataStreamFailureStore instance) throws IOException { + return new DataStreamFailureStore(instance.enabled() == false); + } + + @Override + protected DataStreamFailureStore doParseInstance(XContentParser parser) throws IOException { + return DataStreamFailureStore.fromXContent(parser); + } + + static DataStreamFailureStore randomFailureStore() { + return new DataStreamFailureStore(randomBoolean()); + } +} diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamOptionsTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamOptionsTests.java new file mode 100644 index 0000000000000..8a7cf2329b863 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamOptionsTests.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +public class DataStreamOptionsTests extends AbstractXContentSerializingTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return DataStreamOptions::read; + } + + @Override + protected DataStreamOptions createTestInstance() { + return new DataStreamOptions(randomBoolean() ? null : DataStreamFailureStoreTests.randomFailureStore()); + } + + @Override + protected DataStreamOptions mutateInstance(DataStreamOptions instance) throws IOException { + var failureStore = instance.getFailureStore(); + if (failureStore == null) { + failureStore = DataStreamFailureStoreTests.randomFailureStore(); + } else { + failureStore = randomBoolean() ? null : new DataStreamFailureStore(failureStore.enabled() == false); + } + return new DataStreamOptions(failureStore); + } + + @Override + protected DataStreamOptions doParseInstance(XContentParser parser) throws IOException { + return DataStreamOptions.fromXContent(parser); + } +} From a148619e1eefabc705c6fa35db96db386963d412 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Wed, 11 Sep 2024 17:57:39 +0200 Subject: [PATCH 32/58] Support widening of numeric types in union-types (#112610) * Support widening of numeric types in union-types Only two lines of this PR are the actual fix. All the rest is updating the CSV-spec testing infrastructure to make it easier to test this, and adding the tests. The refactoring involves some cleanup and simplifications as well.
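For example, a variant of an existing dataset can now be declared by reusing its mapping and data files with a per-field type override; a sketch of the new fluent API, mirroring the declarations added to CsvTestsDataLoader below:

    private static final TestsDataset SAMPLE_DATA = new TestsDataset("sample_data");
    // reuses sample_data.csv and its mapping, overriding only the client_ip field type
    private static final TestsDataset SAMPLE_DATA_STR = SAMPLE_DATA.withIndex("sample_data_str")
        .withTypeMapping(Map.of("client_ip", "keyword"));

The loader rewrites the overridden field's type in the mapping JSON, and the CSV reader applies the same override to the header schema, so no duplicate fixture files are needed.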
This update allows us to add alternative mappings of existing data files without copying the files and changing the header line. Some of the existing union-types test files were deleted as a result, which is a step in the right direction. * Update docs/changelog/112610.yaml * Link capability to PR --- docs/changelog/112610.yaml | 6 + .../plugin/esql/qa/testFixtures/build.gradle | 22 +- .../xpack/esql/CsvTestUtils.java | 19 +- .../xpack/esql/CsvTestsDataLoader.java | 219 ++++++++---------- .../resources/mapping-sample_data_str.json | 16 -- .../mapping-sample_data_ts_long.json | 16 -- .../src/main/resources/sample_data_str.csv | 8 - .../src/main/resources/union_types.csv-spec | 51 ++++ .../xpack/esql/action/EsqlCapabilities.java | 5 + .../xpack/esql/analysis/Analyzer.java | 4 +- .../convert/AbstractConvertFunction.java | 2 +- .../elasticsearch/xpack/esql/CsvTests.java | 20 +- 12 files changed, 201 insertions(+), 187 deletions(-) create mode 100644 docs/changelog/112610.yaml delete mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-sample_data_str.json delete mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-sample_data_ts_long.json delete mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/sample_data_str.csv diff --git a/docs/changelog/112610.yaml b/docs/changelog/112610.yaml new file mode 100644 index 0000000000000..3d67a80a8f0b3 --- /dev/null +++ b/docs/changelog/112610.yaml @@ -0,0 +1,6 @@ +pr: 112610 +summary: Support widening of numeric types in union-types +area: ES|QL +type: bug +issues: + - 111277 diff --git a/x-pack/plugin/esql/qa/testFixtures/build.gradle b/x-pack/plugin/esql/qa/testFixtures/build.gradle index e8a95011100f5..b6ed610406631 100644 --- a/x-pack/plugin/esql/qa/testFixtures/build.gradle +++ b/x-pack/plugin/esql/qa/testFixtures/build.gradle @@ -2,16 +2,18 @@ apply plugin: 'elasticsearch.java' apply plugin: org.elasticsearch.gradle.dependencies.CompileOnlyResolvePlugin dependencies { - implementation project(':x-pack:plugin:esql:compute') - implementation project(':x-pack:plugin:esql') - compileOnly project(path: xpackModule('core')) - implementation project(":libs:elasticsearch-x-content") - implementation project(':client:rest') - implementation project(':libs:elasticsearch-logging') - implementation project(':test:framework') - api(testArtifact(project(xpackModule('esql-core')))) - implementation project(':server') - implementation "net.sf.supercsv:super-csv:${versions.supercsv}" + implementation project(':x-pack:plugin:esql:compute') + implementation project(':x-pack:plugin:esql') + compileOnly project(path: xpackModule('core')) + implementation project(":libs:elasticsearch-x-content") + implementation project(':client:rest') + implementation project(':libs:elasticsearch-logging') + implementation project(':test:framework') + api(testArtifact(project(xpackModule('esql-core')))) + implementation project(':server') + implementation "net.sf.supercsv:super-csv:${versions.supercsv}" + implementation "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" + implementation "com.fasterxml.jackson.core:jackson-databind:${versions.jackson}" } /** diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java index c934a8926ee7e..70a054f233a3c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java +++ 
b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java @@ -118,7 +118,7 @@ public static Tuple skipVersionRange(String testName, String i return null; } - public static Tuple> loadPageFromCsv(URL source) throws Exception { + public static Tuple> loadPageFromCsv(URL source, Map typeMapping) throws Exception { record CsvColumn(String name, Type type, BuilderWrapper builderWrapper) implements Releasable { void append(String stringValue) { @@ -164,21 +164,16 @@ public void close() { if (columns == null) { columns = new CsvColumn[entries.length]; for (int i = 0; i < entries.length; i++) { - int split = entries[i].indexOf(':'); - String name, typeName; + String[] header = entries[i].split(":"); + String name = header[0].trim(); + String typeName = (typeMapping != null && typeMapping.containsKey(name)) ? typeMapping.get(name) + : header.length > 1 ? header[1].trim() + : null; - if (split < 0) { + if (typeName == null || typeName.isEmpty()) { throw new IllegalArgumentException( "A type is always expected in the schema definition; found " + entries[i] ); - } else { - name = entries[i].substring(0, split).trim(); - typeName = entries[i].substring(split + 1).trim(); - if (typeName.length() == 0) { - throw new IllegalArgumentException( - "A type is always expected in the schema definition; found " + entries[i] - ); - } } Type type = Type.asType(typeName); if (type == null) { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index 9ee22113a4244..068adf190653a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -6,6 +6,10 @@ */ package org.elasticsearch.xpack.esql; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; + import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.apache.http.auth.AuthScope; @@ -17,20 +21,13 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClientBuilder; -import org.elasticsearch.cluster.ClusterModule; -import org.elasticsearch.common.CheckedBiFunction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.test.rest.ESRestTestCase; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.XContent; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import java.io.BufferedReader; @@ -51,66 +48,43 @@ public class CsvTestsDataLoader { private static final int BULK_DATA_SIZE = 100_000; - private static final TestsDataset EMPLOYEES = new TestsDataset("employees", "mapping-default.json", "employees.csv", null, false); - private static final TestsDataset HOSTS = new TestsDataset("hosts", "mapping-hosts.json", "hosts.csv"); - private 
static final TestsDataset APPS = new TestsDataset("apps", "mapping-apps.json", "apps.csv"); - private static final TestsDataset LANGUAGES = new TestsDataset("languages", "mapping-languages.json", "languages.csv"); - private static final TestsDataset ALERTS = new TestsDataset("alerts", "mapping-alerts.json", "alerts.csv"); - private static final TestsDataset UL_LOGS = new TestsDataset("ul_logs", "mapping-ul_logs.json", "ul_logs.csv"); - private static final TestsDataset SAMPLE_DATA = new TestsDataset("sample_data", "mapping-sample_data.json", "sample_data.csv"); - private static final TestsDataset SAMPLE_DATA_STR = new TestsDataset( - "sample_data_str", - "mapping-sample_data_str.json", - "sample_data_str.csv" - ); - private static final TestsDataset SAMPLE_DATA_TS_LONG = new TestsDataset( - "sample_data_ts_long", - "mapping-sample_data_ts_long.json", - "sample_data_ts_long.csv" - ); - private static final TestsDataset MISSING_IP_SAMPLE_DATA = new TestsDataset( - "missing_ip_sample_data", - "mapping-missing_ip_sample_data.json", - "missing_ip_sample_data.csv" - ); - private static final TestsDataset CLIENT_IPS = new TestsDataset("clientips", "mapping-clientips.json", "clientips.csv"); - private static final TestsDataset CLIENT_CIDR = new TestsDataset("client_cidr", "mapping-client_cidr.json", "client_cidr.csv"); - private static final TestsDataset AGES = new TestsDataset("ages", "mapping-ages.json", "ages.csv"); - private static final TestsDataset HEIGHTS = new TestsDataset("heights", "mapping-heights.json", "heights.csv"); - private static final TestsDataset DECADES = new TestsDataset("decades", "mapping-decades.json", "decades.csv"); - private static final TestsDataset AIRPORTS = new TestsDataset("airports", "mapping-airports.json", "airports.csv"); - private static final TestsDataset AIRPORTS_MP = new TestsDataset("airports_mp", "mapping-airports.json", "airports_mp.csv"); - private static final TestsDataset AIRPORTS_WEB = new TestsDataset("airports_web", "mapping-airports_web.json", "airports_web.csv"); - private static final TestsDataset DATE_NANOS = new TestsDataset("date_nanos", "mapping-date_nanos.json", "date_nanos.csv"); - private static final TestsDataset COUNTRIES_BBOX = new TestsDataset( - "countries_bbox", - "mapping-countries_bbox.json", - "countries_bbox.csv" - ); - private static final TestsDataset COUNTRIES_BBOX_WEB = new TestsDataset( - "countries_bbox_web", - "mapping-countries_bbox_web.json", - "countries_bbox_web.csv" - ); - private static final TestsDataset AIRPORT_CITY_BOUNDARIES = new TestsDataset( - "airport_city_boundaries", - "mapping-airport_city_boundaries.json", - "airport_city_boundaries.csv" - ); - private static final TestsDataset CARTESIAN_MULTIPOLYGONS = new TestsDataset( - "cartesian_multipolygons", - "mapping-cartesian_multipolygons.json", - "cartesian_multipolygons.csv" - ); - private static final TestsDataset DISTANCES = new TestsDataset("distances", "mapping-distances.json", "distances.csv"); - private static final TestsDataset K8S = new TestsDataset("k8s", "k8s-mappings.json", "k8s.csv", "k8s-settings.json", true); - private static final TestsDataset ADDRESSES = new TestsDataset("addresses", "mapping-addresses.json", "addresses.csv", null, true); - private static final TestsDataset BOOKS = new TestsDataset("books", "mapping-books.json", "books.csv", null, true); + private static final TestsDataset EMPLOYEES = new TestsDataset("employees", "mapping-default.json", "employees.csv").noSubfields(); + private static final TestsDataset HOSTS = new 
TestsDataset("hosts"); + private static final TestsDataset APPS = new TestsDataset("apps"); + private static final TestsDataset APPS_SHORT = APPS.withIndex("apps_short").withTypeMapping(Map.of("id", "short")); + private static final TestsDataset LANGUAGES = new TestsDataset("languages"); + private static final TestsDataset ALERTS = new TestsDataset("alerts"); + private static final TestsDataset UL_LOGS = new TestsDataset("ul_logs"); + private static final TestsDataset SAMPLE_DATA = new TestsDataset("sample_data"); + private static final TestsDataset SAMPLE_DATA_STR = SAMPLE_DATA.withIndex("sample_data_str") + .withTypeMapping(Map.of("client_ip", "keyword")); + private static final TestsDataset SAMPLE_DATA_TS_LONG = SAMPLE_DATA.withIndex("sample_data_ts_long") + .withData("sample_data_ts_long.csv") + .withTypeMapping(Map.of("@timestamp", "long")); + private static final TestsDataset MISSING_IP_SAMPLE_DATA = new TestsDataset("missing_ip_sample_data"); + private static final TestsDataset CLIENT_IPS = new TestsDataset("clientips"); + private static final TestsDataset CLIENT_CIDR = new TestsDataset("client_cidr"); + private static final TestsDataset AGES = new TestsDataset("ages"); + private static final TestsDataset HEIGHTS = new TestsDataset("heights"); + private static final TestsDataset DECADES = new TestsDataset("decades"); + private static final TestsDataset AIRPORTS = new TestsDataset("airports"); + private static final TestsDataset AIRPORTS_MP = AIRPORTS.withIndex("airports_mp").withData("airports_mp.csv"); + private static final TestsDataset AIRPORTS_WEB = new TestsDataset("airports_web"); + private static final TestsDataset DATE_NANOS = new TestsDataset("date_nanos"); + private static final TestsDataset COUNTRIES_BBOX = new TestsDataset("countries_bbox"); + private static final TestsDataset COUNTRIES_BBOX_WEB = new TestsDataset("countries_bbox_web"); + private static final TestsDataset AIRPORT_CITY_BOUNDARIES = new TestsDataset("airport_city_boundaries"); + private static final TestsDataset CARTESIAN_MULTIPOLYGONS = new TestsDataset("cartesian_multipolygons"); + private static final TestsDataset DISTANCES = new TestsDataset("distances"); + private static final TestsDataset K8S = new TestsDataset("k8s", "k8s-mappings.json", "k8s.csv").withSetting("k8s-settings.json"); + private static final TestsDataset ADDRESSES = new TestsDataset("addresses"); + private static final TestsDataset BOOKS = new TestsDataset("books"); public static final Map CSV_DATASET_MAP = Map.ofEntries( Map.entry(EMPLOYEES.indexName, EMPLOYEES), Map.entry(HOSTS.indexName, HOSTS), Map.entry(APPS.indexName, APPS), + Map.entry(APPS_SHORT.indexName, APPS_SHORT), Map.entry(LANGUAGES.indexName, LANGUAGES), Map.entry(UL_LOGS.indexName, UL_LOGS), Map.entry(SAMPLE_DATA.indexName, SAMPLE_DATA), @@ -258,18 +232,8 @@ public static void loadDataSetIntoEs(RestClient client, Logger logger) throws IO } private static void loadDataSetIntoEs(RestClient client, Logger logger, IndexCreator indexCreator) throws IOException { - for (var dataSet : CSV_DATASET_MAP.values()) { - final String settingName = dataSet.settingFileName != null ? 
"/" + dataSet.settingFileName : null; - load( - client, - dataSet.indexName, - "/" + dataSet.mappingFileName, - settingName, - "/" + dataSet.dataFileName, - dataSet.allowSubFields, - logger, - indexCreator - ); + for (var dataset : CSV_DATASET_MAP.values()) { + load(client, dataset, logger, indexCreator); } forceMerge(client, CSV_DATASET_MAP.keySet(), logger); for (var policy : ENRICH_POLICIES) { @@ -291,32 +255,51 @@ private static void loadEnrichPolicy(RestClient client, String policyName, Strin client.performRequest(request); } - private static void load( - RestClient client, - String indexName, - String mappingName, - String settingName, - String dataName, - boolean allowSubFields, - Logger logger, - IndexCreator indexCreator - ) throws IOException { + private static void load(RestClient client, TestsDataset dataset, Logger logger, IndexCreator indexCreator) throws IOException { + final String mappingName = "/" + dataset.mappingFileName; URL mapping = CsvTestsDataLoader.class.getResource(mappingName); if (mapping == null) { throw new IllegalArgumentException("Cannot find resource " + mappingName); } + final String dataName = "/" + dataset.dataFileName; URL data = CsvTestsDataLoader.class.getResource(dataName); if (data == null) { throw new IllegalArgumentException("Cannot find resource " + dataName); } Settings indexSettings = Settings.EMPTY; + final String settingName = dataset.settingFileName != null ? "/" + dataset.settingFileName : null; if (settingName != null) { indexSettings = Settings.builder() .loadFromStream(settingName, CsvTestsDataLoader.class.getResourceAsStream(settingName), false) .build(); } - indexCreator.createIndex(client, indexName, readTextFile(mapping), indexSettings); - loadCsvData(client, indexName, data, allowSubFields, CsvTestsDataLoader::createParser, logger); + indexCreator.createIndex(client, dataset.indexName, readMappingFile(mapping, dataset.typeMapping), indexSettings); + loadCsvData(client, dataset.indexName, data, dataset.allowSubFields, logger); + } + + private static String readMappingFile(URL resource, Map typeMapping) throws IOException { + String mappingJsonText = readTextFile(resource); + if (typeMapping == null || typeMapping.isEmpty()) { + return mappingJsonText; + } + boolean modified = false; + ObjectMapper mapper = new ObjectMapper(); + JsonNode mappingNode = mapper.readTree(mappingJsonText); + JsonNode propertiesNode = mappingNode.path("properties"); + + for (Map.Entry entry : typeMapping.entrySet()) { + String key = entry.getKey(); + String newType = entry.getValue(); + + if (propertiesNode.has(key)) { + modified = true; + ((ObjectNode) propertiesNode.get(key)).put("type", newType); + } + } + if (modified) { + return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(mappingNode); + } + return mappingJsonText; } public static String readTextFile(URL resource) throws IOException { @@ -345,14 +328,8 @@ public static String readTextFile(URL resource) throws IOException { * - multi-values are comma separated * - commas inside multivalue fields can be escaped with \ (backslash) character */ - private static void loadCsvData( - RestClient client, - String indexName, - URL resource, - boolean allowSubFields, - CheckedBiFunction p, - Logger logger - ) throws IOException { + private static void loadCsvData(RestClient client, String indexName, URL resource, boolean allowSubFields, Logger logger) + throws IOException { ArrayList failures = new ArrayList<>(); StringBuilder builder = new StringBuilder(); try (BufferedReader reader = 
reader(resource)) { @@ -371,27 +348,17 @@ private static void loadCsvData( columns = new String[entries.length]; for (int i = 0; i < entries.length; i++) { int split = entries[i].indexOf(':'); - String name, typeName; - if (split < 0) { - throw new IllegalArgumentException( - "A type is always expected in the schema definition; found " + entries[i] - ); + columns[i] = entries[i].trim(); } else { - name = entries[i].substring(0, split).trim(); + String name = entries[i].substring(0, split).trim(); if (allowSubFields || name.contains(".") == false) { - typeName = entries[i].substring(split + 1).trim(); - if (typeName.isEmpty()) { - throw new IllegalArgumentException( - "A type is always expected in the schema definition; found " + entries[i] - ); - } + columns[i] = name; } else {// if it's a subfield, ignore it in the _bulk request - name = null; + columns[i] = null; subFieldsIndices.add(i); } } - columns[i] = name; } } // data rows @@ -534,22 +501,40 @@ private static void forceMerge(RestClient client, Set indices, Logger lo } } - private static XContentParser createParser(XContent xContent, InputStream data) throws IOException { - NamedXContentRegistry contentRegistry = new NamedXContentRegistry(ClusterModule.getNamedXWriteables()); - XContentParserConfiguration config = XContentParserConfiguration.EMPTY.withRegistry(contentRegistry) - .withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); - return xContent.createParser(config, data); - } - public record TestsDataset( String indexName, String mappingFileName, String dataFileName, String settingFileName, - boolean allowSubFields + boolean allowSubFields, + Map typeMapping ) { public TestsDataset(String indexName, String mappingFileName, String dataFileName) { - this(indexName, mappingFileName, dataFileName, null, true); + this(indexName, mappingFileName, dataFileName, null, true, null); + } + + public TestsDataset(String indexName) { + this(indexName, "mapping-" + indexName + ".json", indexName + ".csv", null, true, null); + } + + public TestsDataset withIndex(String indexName) { + return new TestsDataset(indexName, mappingFileName, dataFileName, settingFileName, allowSubFields, typeMapping); + } + + public TestsDataset withData(String dataFileName) { + return new TestsDataset(indexName, mappingFileName, dataFileName, settingFileName, allowSubFields, typeMapping); + } + + public TestsDataset withSetting(String settingFileName) { + return new TestsDataset(indexName, mappingFileName, dataFileName, settingFileName, allowSubFields, typeMapping); + } + + public TestsDataset noSubfields() { + return new TestsDataset(indexName, mappingFileName, dataFileName, settingFileName, false, typeMapping); + } + + public TestsDataset withTypeMapping(Map typeMapping) { + return new TestsDataset(indexName, mappingFileName, dataFileName, settingFileName, allowSubFields, typeMapping); } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-sample_data_str.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-sample_data_str.json deleted file mode 100644 index 9e97de8c92928..0000000000000 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-sample_data_str.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "properties": { - "@timestamp": { - "type": "date" - }, - "client_ip": { - "type": "keyword" - }, - "event_duration": { - "type": "long" - }, - "message": { - "type": "keyword" - } - } -} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-sample_data_ts_long.json 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-sample_data_ts_long.json deleted file mode 100644 index ecf21a2a919d0..0000000000000 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-sample_data_ts_long.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "properties": { - "@timestamp": { - "type": "long" - }, - "client_ip": { - "type": "ip" - }, - "event_duration": { - "type": "long" - }, - "message": { - "type": "keyword" - } - } -} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/sample_data_str.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/sample_data_str.csv deleted file mode 100644 index bc98671adc7ff..0000000000000 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/sample_data_str.csv +++ /dev/null @@ -1,8 +0,0 @@ -@timestamp:date,client_ip:keyword,event_duration:long,message:keyword -2023-10-23T13:55:01.543Z,172.21.3.15,1756467,Connected to 10.1.0.1 -2023-10-23T13:53:55.832Z,172.21.3.15,5033755,Connection error -2023-10-23T13:52:55.015Z,172.21.3.15,8268153,Connection error -2023-10-23T13:51:54.732Z,172.21.3.15,725448,Connection error -2023-10-23T13:33:34.937Z,172.21.0.5,1232382,Disconnected -2023-10-23T12:27:28.948Z,172.21.2.113,2764889,Connected to 10.1.0.2 -2023-10-23T12:15:03.360Z,172.21.2.162,3450233,Connected to 10.1.0.3 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec index c6a2d47a78dc9..3218962678d9f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec @@ -1351,3 +1351,54 @@ FROM sample_data, sample_data_ts_long null | 172.21.0.5 | 1232382 | Disconnected | Disconnected null | 172.21.0.5 | 1232382 | Disconnected | Disconnected ; + +shortIntegerWidening +required_capability: union_types +required_capability: metadata_fields +required_capability: casting_operator +required_capability: union_types_numeric_widening + +FROM apps, apps_short METADATA _index +| EVAL id = id::integer +| KEEP _index, id, version, name +| WHERE name == "aaaaa" OR name == "hhhhh" +| SORT _index ASC, id ASC +; + +_index:keyword | id:integer | version:version | name:keyword +apps | 1 | 1 | aaaaa +apps | 8 | 1.2.3.4 | hhhhh +apps | 12 | 1.2.3.4 | aaaaa +apps_short | 1 | 1 | aaaaa +apps_short | 8 | 1.2.3.4 | hhhhh +apps_short | 12 | 1.2.3.4 | aaaaa +; + +shortIntegerWideningStats +required_capability: union_types +required_capability: casting_operator +required_capability: union_types_numeric_widening + +FROM apps, apps_short +| EVAL id = id::integer +| STATS count=count() BY name, id +| KEEP id, name, count +| SORT id ASC, name ASC +; + +id:integer | name:keyword | count:long +1 | aaaaa | 2 +2 | bbbbb | 2 +3 | ccccc | 2 +4 | ddddd | 2 +5 | eeeee | 2 +6 | fffff | 2 +7 | ggggg | 2 +8 | hhhhh | 2 +9 | iiiii | 2 +10 | jjjjj | 2 +11 | kkkkk | 2 +12 | aaaaa | 2 +13 | lllll | 2 +14 | mmmmm | 2 +; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index c0c5ebf010ffd..475e63733022d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -198,6 +198,11 @@ public enum Cap { */ UNION_TYPES_MISSING_FIELD, + /** + * Fix for widening of 
short numeric types in union-types. Done in #112610 + */ + UNION_TYPES_NUMERIC_WIDENING, + /** * Fix a parsing issue where numbers below Long.MIN_VALUE threw an exception instead of parsing as doubles. * see Parsing large numbers is inconsistent #104323 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 4f9ef3df29a85..9288e1cf81a15 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -115,7 +115,6 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; import static org.elasticsearch.xpack.esql.core.type.DataType.LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT; -import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; import static org.elasticsearch.xpack.esql.core.type.DataType.isTemporalAmount; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.LIMIT; @@ -1223,8 +1222,7 @@ private Expression resolveConvertFunction(AbstractConvertFunction convert, List< HashMap typeResolutions = new HashMap<>(); Set supportedTypes = convert.supportedTypes(); imf.types().forEach(type -> { - // TODO: Shouldn't we perform widening of small numerical types here? - if (supportedTypes.contains(type)) { + if (supportedTypes.contains(type.widenSmallNumeric())) { TypeResolutionKey key = new TypeResolutionKey(fa.name(), type); var concreteConvert = typeSpecificConvert(convert, fa.source(), type, imf); typeResolutions.put(key, concreteConvert); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java index 2795ac857983c..f3ce6b1465d6b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java @@ -63,7 +63,7 @@ protected AbstractConvertFunction(StreamInput in) throws IOException { * Build the evaluator given the evaluator a multivalued field. 
*/ protected final ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { - DataType sourceType = field().dataType(); + DataType sourceType = field().dataType().widenSmallNumeric(); var factory = factories().get(sourceType); if (factory == null) { throw EsqlIllegalArgumentException.illegalDataType(sourceType); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index a7d8c98a606b5..faf9d04532f1a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -54,6 +54,8 @@ import org.elasticsearch.xpack.esql.analysis.EnrichResolution; import org.elasticsearch.xpack.esql.analysis.PreAnalyzer; import org.elasticsearch.xpack.esql.core.expression.Attribute; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; @@ -308,8 +310,18 @@ protected void assertResults(ExpectedResults expected, ActualResults actual, boo // CsvTestUtils.logData(actual.values(), LOGGER); } - private static IndexResolution loadIndexResolution(String mappingName, String indexName) { + private static IndexResolution loadIndexResolution(String mappingName, String indexName, Map typeMapping) { var mapping = new TreeMap<>(loadMapping(mappingName)); + if ((typeMapping == null || typeMapping.isEmpty()) == false) { + for (var entry : typeMapping.entrySet()) { + if (mapping.containsKey(entry.getKey())) { + DataType dataType = DataType.fromTypeName(entry.getValue()); + EsField field = mapping.get(entry.getKey()); + EsField editedField = new EsField(field.getName(), dataType, field.getProperties(), field.isAggregatable()); + mapping.put(entry.getKey(), editedField); + } + } + } return IndexResolution.valid(new EsIndex(indexName, mapping, Map.of(indexName, IndexMode.STANDARD))); } @@ -320,7 +332,7 @@ private static EnrichResolution loadEnrichPolicies() { CsvTestsDataLoader.TestsDataset sourceIndex = CSV_DATASET_MAP.get(policy.getIndices().get(0)); // this could practically work, but it's wrong: // EnrichPolicyResolution should contain the policy (system) index, not the source index - EsIndex esIndex = loadIndexResolution(sourceIndex.mappingFileName(), sourceIndex.indexName()).get(); + EsIndex esIndex = loadIndexResolution(sourceIndex.mappingFileName(), sourceIndex.indexName(), null).get(); var concreteIndices = Map.of(RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY, Iterables.get(esIndex.concreteIndices(), 0)); enrichResolution.addResolvedPolicy( policyConfig.policyName(), @@ -349,7 +361,7 @@ private static EnrichPolicy loadEnrichPolicyMapping(String policyFileName) { } private LogicalPlan analyzedPlan(LogicalPlan parsed, CsvTestsDataLoader.TestsDataset dataset) { - var indexResolution = loadIndexResolution(dataset.mappingFileName(), dataset.indexName()); + var indexResolution = loadIndexResolution(dataset.mappingFileName(), dataset.indexName(), dataset.typeMapping()); var enrichPolicies = loadEnrichPolicies(); var analyzer = new Analyzer(new AnalyzerContext(configuration, functionRegistry, indexResolution, enrichPolicies), TEST_VERIFIER); LogicalPlan plan = analyzer.analyze(parsed); @@ -392,7 +404,7 @@ private static 
CsvTestsDataLoader.TestsDataset testsDataset(LogicalPlan parsed) } private static TestPhysicalOperationProviders testOperationProviders(CsvTestsDataLoader.TestsDataset dataset) throws Exception { - var testData = loadPageFromCsv(CsvTests.class.getResource("/" + dataset.dataFileName())); + var testData = loadPageFromCsv(CsvTests.class.getResource("/" + dataset.dataFileName()), dataset.typeMapping()); return new TestPhysicalOperationProviders(testData.v1(), testData.v2()); } From b9b62db84fd6c95394572ef0bac8c66ea7e257e5 Mon Sep 17 00:00:00 2001 From: Maxim Kholod Date: Wed, 11 Sep 2024 18:05:48 +0200 Subject: [PATCH 33/58] add CDR related data streams to kibana_system privileges (#112655) --- .../store/KibanaOwnedReservedRoleDescriptors.java | 14 +++++++++++++- .../authz/store/ReservedRolesStoreTests.java | 10 +++++++++- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 6177329089bd3..89733761f3dc0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -420,7 +420,19 @@ static RoleDescriptor kibanaSystem(String name) { // For source indices of the Cloud Detection & Response (CDR) packages that ships a // transform RoleDescriptor.IndicesPrivileges.builder() - .indices("logs-wiz.vulnerability-*", "logs-wiz.cloud_configuration_finding-*", "logs-aws.securityhub_findings-*") + .indices( + "logs-wiz.vulnerability-*", + "logs-wiz.cloud_configuration_finding-*", + "logs-google_scc.finding-*", + "logs-aws.securityhub_findings-*", + "logs-aws.inspector-*", + "logs-amazon_security_lake.findings-*", + "logs-qualys_vmdr.asset_host_detection-*", + "logs-tenable_sc.vulnerability-*", + "logs-tenable_io.vulnerability-*", + "logs-rapid7_insightvm.vulnerability-*", + "logs-carbon_black_cloud.asset_vulnerability_summary-*" + ) .privileges("read", "view_index_metadata") .build(), // For alias indices of the Cloud Detection & Response (CDR) packages that ships a diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index 54a5678579ce4..a476bbfb229fb 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -1612,7 +1612,15 @@ public void testKibanaSystemRole() { Arrays.asList( "logs-wiz.vulnerability-" + randomAlphaOfLength(randomIntBetween(0, 13)), "logs-wiz.cloud_configuration_finding-" + randomAlphaOfLength(randomIntBetween(0, 13)), - "logs-aws.securityhub_findings-" + randomAlphaOfLength(randomIntBetween(0, 13)) + "logs-google_scc.finding-" + randomAlphaOfLength(randomIntBetween(0, 13)), + "logs-aws.securityhub_findings-" + randomAlphaOfLength(randomIntBetween(0, 13)), + "logs-aws.inspector-" + randomAlphaOfLength(randomIntBetween(0, 13)), + "logs-amazon_security_lake.findings-" + randomAlphaOfLength(randomIntBetween(0, 13)), + "logs-qualys_vmdr.asset_host_detection-" +
randomAlphaOfLength(randomIntBetween(0, 13)), + "logs-tenable_sc.vulnerability-" + randomAlphaOfLength(randomIntBetween(0, 13)), + "logs-tenable_io.vulnerability-" + randomAlphaOfLength(randomIntBetween(0, 13)), + "logs-rapid7_insightvm.vulnerability-" + randomAlphaOfLength(randomIntBetween(0, 13)), + "logs-carbon_black_cloud.asset_vulnerability_summary-" + randomAlphaOfLength(randomIntBetween(0, 13)) ).forEach(indexName -> { final IndexAbstraction indexAbstraction = mockIndexAbstraction(indexName); assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(indexAbstraction), is(false)); From 1a054887f39813c91092666986ce324d0f14a986 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 11 Sep 2024 09:40:11 -0700 Subject: [PATCH 34/58] Bump Elasticsearch version to 9.0.0 (#112570) --- .backportrc.json | 7 +- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 323 +-------------- .buildkite/pipelines/periodic.yml | 384 +----------------- .ci/bwcVersions | 19 +- .ci/snapshotBwcVersions | 2 +- REST_API_COMPATIBILITY.md | 6 +- branches.json | 3 + ...lDistributionBwcSetupPluginFuncTest.groovy | 6 +- ...acyYamlRestCompatTestPluginFuncTest.groovy | 96 +++-- .../gradle/internal/BwcVersions.java | 62 ++- .../test/rest/RestTestBasePlugin.java | 4 + .../AbstractYamlRestCompatTestPlugin.java | 23 +- .../LegacyYamlRestCompatTestPlugin.java | 2 +- .../compat/YamlRestCompatTestPlugin.java | 2 +- .../gradle/internal/BwcVersionsSpec.groovy | 82 +++- build-tools-internal/version.properties | 2 +- build.gradle | 8 +- .../elasticsearch/core/RestApiVersion.java | 12 +- modules/aggregations/build.gradle | 24 -- modules/analysis-common/build.gradle | 7 - .../common/CommonAnalysisPluginTests.java | 106 ----- .../common/EdgeNGramTokenizerTests.java | 61 +-- .../common/SynonymsAnalysisTests.java | 6 +- ...DelimiterGraphTokenFilterFactoryTests.java | 71 +--- .../health-shards-availability/build.gradle | 4 - modules/ingest-attachment/build.gradle | 10 +- modules/ingest-common/build.gradle | 4 - modules/ingest-geoip/build.gradle | 5 - .../qa/full-cluster-restart/build.gradle | 4 +- .../ingest/geoip/FullClusterRestartIT.java | 4 + modules/ingest-user-agent/build.gradle | 4 - modules/lang-mustache/build.gradle | 5 - modules/lang-painless/build.gradle | 42 -- .../legacygeo/GeoJsonShapeParserTests.java | 3 + .../legacygeo/GeoWKTShapeParserTests.java | 7 + .../LegacyGeoShapeFieldMapperTests.java | 4 + .../mapper/LegacyGeoShapeFieldTypeTests.java | 4 + modules/parent-join/build.gradle | 4 - modules/percolator/build.gradle | 4 - modules/reindex/build.gradle | 27 -- .../migration/MultiFeatureMigrationIT.java | 4 +- modules/repository-url/build.gradle | 5 - modules/runtime-fields-common/build.gradle | 7 - plugins/analysis-icu/build.gradle | 4 - .../AnalysisPhoneticFactoryTests.java | 2 +- ...rameterizedFullClusterRestartTestCase.java | 3 +- .../elasticsearch/upgrades/RecoveryIT.java | 8 +- rest-api-spec/build.gradle | 188 --------- ...config_exclusions_with_node_name_part.json | 33 -- .../api/indices.put_template_with_param.json | 54 --- .../10_basic_compat.yml | 18 - .../10_basic_upgrade.yml | 42 -- .../indices.put_template/10_basic_compat.yml | 66 --- .../nodes.hot_threads/10_basic_compat.yml | 19 - .../test/search.aggregation/10_moving_avg.yml | 28 -- .../search.sort/10_nested_path_filter.yml | 149 ------- .../test/search/10_cutoff_frequency.yml | 103 ----- .../test/search/10_geo_bounding_box.yml | 78 ---- .../test/search/10_type_query.yml | 52 --- 
.../mapping/MalformedDynamicTemplateIT.java | 3 + .../repositories/IndexSnapshotsServiceIT.java | 5 +- .../org/elasticsearch/TransportVersions.java | 2 + .../main/java/org/elasticsearch/Version.java | 27 +- .../elasticsearch/index/IndexVersions.java | 6 +- .../rest/RestCompatibleVersionHelper.java | 7 +- .../cluster/stats/VersionStatsTests.java | 10 +- .../FieldCapabilitiesNodeResponseTests.java | 48 --- .../FieldCapabilitiesResponseTests.java | 47 --- .../MetadataCreateIndexServiceTests.java | 3 + .../cluster/metadata/MetadataTests.java | 20 +- .../CompatibleNamedXContentRegistryTests.java | 3 + .../env/NodeEnvironmentTests.java | 5 + .../elasticsearch/env/NodeMetadataTests.java | 5 + .../index/IndexSortSettingsTests.java | 14 - .../index/IndexVersionTests.java | 47 ++- .../index/engine/InternalEngineTests.java | 116 +----- .../index/mapper/DateFieldMapperTests.java | 54 --- .../index/mapper/DynamicTemplatesTests.java | 95 +---- .../mapper/FieldNamesFieldMapperTests.java | 37 -- .../index/mapper/IpFieldMapperTests.java | 7 - .../index/mapper/ParametrizedMapperTests.java | 36 +- .../index/mapper/TypeParsersTests.java | 29 -- ...BinaryDenseVectorScriptDocValuesTests.java | 4 +- .../vectors/DenseVectorFieldMapperTests.java | 18 - .../vectors/SparseVectorFieldMapperTests.java | 42 -- .../vectors/VectorEncoderDecoderTests.java | 3 +- .../similarity/SimilarityServiceTests.java | 6 +- .../indices/IndicesModuleTests.java | 3 + .../indices/analysis/AnalysisModuleTests.java | 29 -- .../RestCompatibleVersionHelperTests.java | 3 + .../script/VectorScoreScriptUtilsTests.java | 16 +- .../field/vectors/DenseVectorTests.java | 2 +- .../index/mapper/MapperTestCase.java | 16 - .../index/mapper/MetadataMapperTestCase.java | 6 +- .../AbstractSnapshotIntegTestCase.java | 5 +- .../plugin/autoscaling/qa/rest/build.gradle | 6 - x-pack/plugin/build.gradle | 117 +----- .../action/AutoFollowCoordinatorTests.java | 3 + .../ilm/SetSingleNodeAllocateStepTests.java | 19 +- .../IndexDeprecationChecksTests.java | 2 +- x-pack/plugin/downsample/qa/rest/build.gradle | 6 +- .../qa/full-cluster-restart/build.gradle | 5 - .../application/FullClusterRestartIT.java | 2 + .../xpack/eql/EsEQLCorrectnessIT.java | 2 + x-pack/plugin/eql/qa/rest/build.gradle | 10 +- .../src/main/resources/stats.csv-spec | 2 +- .../plugin/mapper-unsigned-long/build.gradle | 9 - .../build.gradle | 2 +- .../bwc/codecs/OldCodecsAvailableTests.java | 3 + ...bleSnapshotIndexMetadataUpgraderTests.java | 4 + ...tyImplicitBehaviorBootstrapCheckTests.java | 87 +--- .../SnapshotsRecoveryPlannerServiceTests.java | 5 + ...BoxQueryLegacyGeoShapeWithDocValuesIT.java | 4 + .../search/LegacyGeoShapeWithDocValuesIT.java | 4 + ...GeoShapeWithDocValuesFieldMapperTests.java | 13 +- .../index/mapper/ShapeFieldMapperTests.java | 2 +- .../GeoShapeQueryBuilderGeoShapeTests.java | 24 +- ...LegacyGeoShapeWithDocValuesQueryTests.java | 4 + .../ShapeQueryBuilderOverShapeTests.java | 15 +- x-pack/plugin/sql/qa/jdbc/build.gradle | 2 +- .../10_compat_geo_shape_with_types.yml | 68 ---- .../test/freeze.gone/10_basic_compat.yml | 38 -- .../build.gradle | 2 +- x-pack/plugin/watcher/qa/rest/build.gradle | 22 - .../api/xpack-watcher.ack_watch.json | 55 --- .../api/xpack-watcher.activate_watch.json | 34 -- .../api/xpack-watcher.deactivate_watch.json | 34 -- .../api/xpack-watcher.delete_watch.json | 33 -- .../api/xpack-watcher.execute_watch.json | 57 --- .../api/xpack-watcher.get_watch.json | 34 -- .../api/xpack-watcher.put_watch.json | 57 --- .../api/xpack-watcher.start.json | 28 -- 
.../api/xpack-watcher.stats.json | 66 --- .../rest-api-spec/api/xpack-watcher.stop.json | 28 -- .../mapper/WildcardFieldMapperTests.java | 36 -- .../qa/repository-old-versions/build.gradle | 2 +- .../qa/xpack-prefix-rest-compat/build.gradle | 195 --------- .../xpack/test/rest/XPackRestIT.java | 24 -- .../api/xpack-license.delete.json | 27 -- .../rest-api-spec/api/xpack-license.get.json | 38 -- .../api/xpack-license.get_basic_status.json | 28 -- .../api/xpack-license.get_trial_status.json | 28 -- .../rest-api-spec/api/xpack-license.post.json | 46 --- .../api/xpack-license.post_start_basic.json | 33 -- .../api/xpack-license.post_start_trial.json | 37 -- .../api/xpack-migration.deprecations.json | 44 -- .../rest-api-spec/api/xpack-ml.close_job.json | 60 --- .../api/xpack-ml.delete_calendar.json | 33 -- .../api/xpack-ml.delete_calendar_event.json | 37 -- .../api/xpack-ml.delete_calendar_job.json | 37 -- .../api/xpack-ml.delete_datafeed.json | 40 -- .../api/xpack-ml.delete_expired_data.json | 58 --- .../api/xpack-ml.delete_filter.json | 33 -- .../api/xpack-ml.delete_forecast.json | 65 --- .../api/xpack-ml.delete_job.json | 45 -- .../api/xpack-ml.delete_model_snapshot.json | 37 -- .../rest-api-spec/api/xpack-ml.flush_job.json | 59 --- .../rest-api-spec/api/xpack-ml.forecast.json | 50 --- .../api/xpack-ml.get_buckets.json | 97 ----- .../api/xpack-ml.get_calendar_events.json | 55 --- .../api/xpack-ml.get_calendars.json | 55 --- .../api/xpack-ml.get_categories.json | 73 ---- .../api/xpack-ml.get_datafeed_stats.json | 56 --- .../api/xpack-ml.get_datafeeds.json | 62 --- .../api/xpack-ml.get_filters.json | 49 --- .../api/xpack-ml.get_influencers.json | 72 ---- .../api/xpack-ml.get_job_stats.json | 52 --- .../rest-api-spec/api/xpack-ml.get_jobs.json | 62 --- .../api/xpack-ml.get_model_snapshots.json | 85 ---- .../api/xpack-ml.get_overall_buckets.json | 73 ---- .../api/xpack-ml.get_records.json | 72 ---- .../rest-api-spec/api/xpack-ml.info.json | 27 -- .../rest-api-spec/api/xpack-ml.open_job.json | 33 -- .../api/xpack-ml.post_calendar_events.json | 38 -- .../rest-api-spec/api/xpack-ml.post_data.json | 49 --- .../api/xpack-ml.preview_datafeed.json | 49 --- .../api/xpack-ml.put_calendar.json | 38 -- .../api/xpack-ml.put_calendar_job.json | 37 -- .../api/xpack-ml.put_datafeed.json | 63 --- .../api/xpack-ml.put_filter.json | 38 -- .../rest-api-spec/api/xpack-ml.put_job.json | 63 --- .../api/xpack-ml.revert_model_snapshot.json | 47 --- .../api/xpack-ml.set_upgrade_mode.json | 37 -- .../api/xpack-ml.start_datafeed.json | 54 --- .../api/xpack-ml.stop_datafeed.json | 60 --- .../api/xpack-ml.update_datafeed.json | 63 --- .../api/xpack-ml.update_filter.json | 38 -- .../api/xpack-ml.update_job.json | 38 -- .../api/xpack-ml.update_model_snapshot.json | 43 -- .../rest-api-spec/api/xpack-ml.validate.json | 33 -- .../api/xpack-ml.validate_detector.json | 33 -- .../api/xpack-monitoring.bulk.json | 48 --- .../api/xpack-rollup.delete_job.json | 33 -- .../api/xpack-rollup.get_jobs.json | 43 -- .../api/xpack-rollup.get_rollup_caps.json | 43 -- .../xpack-rollup.get_rollup_index_caps.json | 33 -- .../api/xpack-rollup.put_job.json | 38 -- .../api/xpack-rollup.start_job.json | 33 -- .../api/xpack-rollup.stop_job.json | 45 -- .../api/xpack-sql.clear_cursor.json | 28 -- .../rest-api-spec/api/xpack-sql.query.json | 35 -- .../api/xpack-sql.translate.json | 30 -- .../api/xpack-ssl.certificates.json | 28 -- .../test/monitoring.bulk/10_basic_compat.yml | 31 -- 205 files changed, 498 insertions(+), 6811 deletions(-) delete mode 100644 
rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/api/cluster.post_voting_config_exclusions_with_node_name_part.json delete mode 100644 rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/api/indices.put_template_with_param.json delete mode 100644 rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/cluster.voting_config_exclusions/10_basic_compat.yml delete mode 100644 rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/indices.deprecated.upgrade/10_basic_upgrade.yml delete mode 100644 rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/indices.put_template/10_basic_compat.yml delete mode 100644 rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/nodes.hot_threads/10_basic_compat.yml delete mode 100644 rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search.aggregation/10_moving_avg.yml delete mode 100644 rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search.sort/10_nested_path_filter.yml delete mode 100644 rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_cutoff_frequency.yml delete mode 100644 rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_geo_bounding_box.yml delete mode 100644 rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_type_query.yml delete mode 100644 x-pack/plugin/src/yamlRestTestV7Compat/resources/rest-api-spec.test.geo_shape/10_compat_geo_shape_with_types.yml delete mode 100644 x-pack/plugin/src/yamlRestTestV7Compat/resources/rest-api-spec/test/freeze.gone/10_basic_compat.yml delete mode 100644 x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.ack_watch.json delete mode 100644 x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.activate_watch.json delete mode 100644 x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.deactivate_watch.json delete mode 100644 x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.delete_watch.json delete mode 100644 x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.execute_watch.json delete mode 100644 x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.get_watch.json delete mode 100644 x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.put_watch.json delete mode 100644 x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.start.json delete mode 100644 x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.stats.json delete mode 100644 x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.stop.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/build.gradle delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.delete.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.get.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.get_basic_status.json 
delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.get_trial_status.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.post.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.post_start_basic.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.post_start_trial.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-migration.deprecations.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.close_job.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_calendar.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_calendar_event.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_calendar_job.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_datafeed.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_expired_data.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_filter.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_forecast.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_job.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_model_snapshot.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.flush_job.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.forecast.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_buckets.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_calendar_events.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_calendars.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_categories.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_datafeed_stats.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_datafeeds.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_filters.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_influencers.json delete mode 100644 
x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_job_stats.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_jobs.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_model_snapshots.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_overall_buckets.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_records.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.info.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.open_job.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.post_calendar_events.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.post_data.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.preview_datafeed.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.put_calendar.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.put_calendar_job.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.put_datafeed.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.put_filter.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.put_job.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.revert_model_snapshot.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.set_upgrade_mode.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.start_datafeed.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.stop_datafeed.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.update_datafeed.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.update_filter.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.update_job.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.update_model_snapshot.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.validate.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.validate_detector.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-monitoring.bulk.json delete mode 100644 
x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.delete_job.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.get_jobs.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.get_rollup_caps.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.get_rollup_index_caps.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.put_job.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.start_job.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.stop_job.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-sql.clear_cursor.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-sql.query.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-sql.translate.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ssl.certificates.json delete mode 100644 x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/test/monitoring.bulk/10_basic_compat.yml diff --git a/.backportrc.json b/.backportrc.json index 77b06cd419275..d2e92817c026b 100644 --- a/.backportrc.json +++ b/.backportrc.json @@ -1,9 +1,10 @@ { "upstream" : "elastic/elasticsearch", - "targetBranchChoices" : [ "main", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ], + "targetBranchChoices" : [ "main", "8.x", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ], "targetPRLabels" : [ "backport" ], "branchLabelMapping" : { - "^v8.16.0$" : "main", + "^v9.0.0$" : "main", + "^v8.16.0$" : "8.x", "^v(\\d+).(\\d+).\\d+(?:-(?:alpha|beta|rc)\\d+)?$" : "$1.$2" } -} \ No newline at end of file +} diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index f698f722d977e..e7ba4ba7610cd 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -62,7 +62,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.25", "8.15.2", "8.16.0"] + BWC_VERSION: ["8.15.2", "8.16.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 3c98dd4b30e74..8ef8f5954887e 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -33,312 +33,6 @@ steps: env: {} - group: packaging-tests-upgrade steps: - - label: "{{matrix.image}} / 7.0.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.0.1 - timeout_in_minutes: 300 - matrix: - setup: - image: - - rocky-8 - - ubuntu-2004 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - machineType: custom-16-32768 - buildDirectory: /dev/shm/bk - diskSizeGb: 250 - env: - BWC_VERSION: 7.0.1 - - 
- label: "{{matrix.image}} / 7.1.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.1.1 - timeout_in_minutes: 300 - matrix: - setup: - image: - - rocky-8 - - ubuntu-2004 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - machineType: custom-16-32768 - buildDirectory: /dev/shm/bk - diskSizeGb: 250 - env: - BWC_VERSION: 7.1.1 - - - label: "{{matrix.image}} / 7.2.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.2.1 - timeout_in_minutes: 300 - matrix: - setup: - image: - - rocky-8 - - ubuntu-2004 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - machineType: custom-16-32768 - buildDirectory: /dev/shm/bk - diskSizeGb: 250 - env: - BWC_VERSION: 7.2.1 - - - label: "{{matrix.image}} / 7.3.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.3.2 - timeout_in_minutes: 300 - matrix: - setup: - image: - - rocky-8 - - ubuntu-2004 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - machineType: custom-16-32768 - buildDirectory: /dev/shm/bk - diskSizeGb: 250 - env: - BWC_VERSION: 7.3.2 - - - label: "{{matrix.image}} / 7.4.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.4.2 - timeout_in_minutes: 300 - matrix: - setup: - image: - - rocky-8 - - ubuntu-2004 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - machineType: custom-16-32768 - buildDirectory: /dev/shm/bk - diskSizeGb: 250 - env: - BWC_VERSION: 7.4.2 - - - label: "{{matrix.image}} / 7.5.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.5.2 - timeout_in_minutes: 300 - matrix: - setup: - image: - - rocky-8 - - ubuntu-2004 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - machineType: custom-16-32768 - buildDirectory: /dev/shm/bk - diskSizeGb: 250 - env: - BWC_VERSION: 7.5.2 - - - label: "{{matrix.image}} / 7.6.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.6.2 - timeout_in_minutes: 300 - matrix: - setup: - image: - - rocky-8 - - ubuntu-2004 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - machineType: custom-16-32768 - buildDirectory: /dev/shm/bk - diskSizeGb: 250 - env: - BWC_VERSION: 7.6.2 - - - label: "{{matrix.image}} / 7.7.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.7.1 - timeout_in_minutes: 300 - matrix: - setup: - image: - - rocky-8 - - ubuntu-2004 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - machineType: custom-16-32768 - buildDirectory: /dev/shm/bk - diskSizeGb: 250 - env: - BWC_VERSION: 7.7.1 - - - label: "{{matrix.image}} / 7.8.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.8.1 - timeout_in_minutes: 300 - matrix: - setup: - image: - - rocky-8 - - ubuntu-2004 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - machineType: custom-16-32768 - buildDirectory: /dev/shm/bk - diskSizeGb: 250 - env: - BWC_VERSION: 7.8.1 - - - label: "{{matrix.image}} / 7.9.3 / packaging-tests-upgrade" - command: 
./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.9.3 - timeout_in_minutes: 300 - matrix: - setup: - image: - - rocky-8 - - ubuntu-2004 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - machineType: custom-16-32768 - buildDirectory: /dev/shm/bk - diskSizeGb: 250 - env: - BWC_VERSION: 7.9.3 - - - label: "{{matrix.image}} / 7.10.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.10.2 - timeout_in_minutes: 300 - matrix: - setup: - image: - - rocky-8 - - ubuntu-2004 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - machineType: custom-16-32768 - buildDirectory: /dev/shm/bk - diskSizeGb: 250 - env: - BWC_VERSION: 7.10.2 - - - label: "{{matrix.image}} / 7.11.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.11.2 - timeout_in_minutes: 300 - matrix: - setup: - image: - - rocky-8 - - ubuntu-2004 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - machineType: custom-16-32768 - buildDirectory: /dev/shm/bk - diskSizeGb: 250 - env: - BWC_VERSION: 7.11.2 - - - label: "{{matrix.image}} / 7.12.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.12.1 - timeout_in_minutes: 300 - matrix: - setup: - image: - - rocky-8 - - ubuntu-2004 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - machineType: custom-16-32768 - buildDirectory: /dev/shm/bk - diskSizeGb: 250 - env: - BWC_VERSION: 7.12.1 - - - label: "{{matrix.image}} / 7.13.4 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.13.4 - timeout_in_minutes: 300 - matrix: - setup: - image: - - rocky-8 - - ubuntu-2004 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - machineType: custom-16-32768 - buildDirectory: /dev/shm/bk - diskSizeGb: 250 - env: - BWC_VERSION: 7.13.4 - - - label: "{{matrix.image}} / 7.14.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.14.2 - timeout_in_minutes: 300 - matrix: - setup: - image: - - rocky-8 - - ubuntu-2004 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - machineType: custom-16-32768 - buildDirectory: /dev/shm/bk - diskSizeGb: 250 - env: - BWC_VERSION: 7.14.2 - - - label: "{{matrix.image}} / 7.15.2 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.15.2 - timeout_in_minutes: 300 - matrix: - setup: - image: - - rocky-8 - - ubuntu-2004 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - machineType: custom-16-32768 - buildDirectory: /dev/shm/bk - diskSizeGb: 250 - env: - BWC_VERSION: 7.15.2 - - - label: "{{matrix.image}} / 7.16.3 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.16.3 - timeout_in_minutes: 300 - matrix: - setup: - image: - - rocky-8 - - ubuntu-2004 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - machineType: custom-16-32768 - buildDirectory: /dev/shm/bk - diskSizeGb: 250 - env: - BWC_VERSION: 7.16.3 - - - label: "{{matrix.image}} / 7.17.25 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true 
destructiveDistroUpgradeTest.v7.17.25 - timeout_in_minutes: 300 - matrix: - setup: - image: - - rocky-8 - - ubuntu-2004 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - machineType: custom-16-32768 - buildDirectory: /dev/shm/bk - diskSizeGb: 250 - env: - BWC_VERSION: 7.17.25 - - label: "{{matrix.image}} / 8.0.1 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.0.1 timeout_in_minutes: 300 @@ -628,6 +322,23 @@ steps: env: BWC_VERSION: 8.16.0 + - label: "{{matrix.image}} / 9.0.0 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v9.0.0 + timeout_in_minutes: 300 + matrix: + setup: + image: + - rocky-8 + - ubuntu-2004 + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: custom-16-32768 + buildDirectory: /dev/shm/bk + diskSizeGb: 250 + env: + BWC_VERSION: 9.0.0 + - group: packaging-tests-windows steps: - label: "{{matrix.image}} / packaging-tests-windows" diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 4f862911a2d8c..5f75b7f1a2ef4 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -2,366 +2,6 @@ steps: - group: bwc steps: - - label: 7.0.1 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.0.1#bwcTest - timeout_in_minutes: 300 - agents: - provider: gcp - image: family/elasticsearch-ubuntu-2004 - machineType: n1-standard-32 - buildDirectory: /dev/shm/bk - preemptible: true - diskSizeGb: 250 - env: - BWC_VERSION: 7.0.1 - retry: - automatic: - - exit_status: "-1" - limit: 3 - signal_reason: none - - signal_reason: agent_stop - limit: 3 - - - label: 7.1.1 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.1.1#bwcTest - timeout_in_minutes: 300 - agents: - provider: gcp - image: family/elasticsearch-ubuntu-2004 - machineType: n1-standard-32 - buildDirectory: /dev/shm/bk - preemptible: true - diskSizeGb: 250 - env: - BWC_VERSION: 7.1.1 - retry: - automatic: - - exit_status: "-1" - limit: 3 - signal_reason: none - - signal_reason: agent_stop - limit: 3 - - - label: 7.2.1 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.2.1#bwcTest - timeout_in_minutes: 300 - agents: - provider: gcp - image: family/elasticsearch-ubuntu-2004 - machineType: n1-standard-32 - buildDirectory: /dev/shm/bk - preemptible: true - diskSizeGb: 250 - env: - BWC_VERSION: 7.2.1 - retry: - automatic: - - exit_status: "-1" - limit: 3 - signal_reason: none - - signal_reason: agent_stop - limit: 3 - - - label: 7.3.2 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.3.2#bwcTest - timeout_in_minutes: 300 - agents: - provider: gcp - image: family/elasticsearch-ubuntu-2004 - machineType: n1-standard-32 - buildDirectory: /dev/shm/bk - preemptible: true - diskSizeGb: 250 - env: - BWC_VERSION: 7.3.2 - retry: - automatic: - - exit_status: "-1" - limit: 3 - signal_reason: none - - signal_reason: agent_stop - limit: 3 - - - label: 7.4.2 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.4.2#bwcTest - timeout_in_minutes: 300 - agents: - provider: gcp - image: family/elasticsearch-ubuntu-2004 - machineType: n1-standard-32 - buildDirectory: /dev/shm/bk - preemptible: true - diskSizeGb: 250 - env: - BWC_VERSION: 7.4.2 - retry: - automatic: - - exit_status: "-1" - limit: 3 - signal_reason: none - - signal_reason: agent_stop - limit: 3 - - - label: 7.5.2 / bwc - 
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.5.2#bwcTest - timeout_in_minutes: 300 - agents: - provider: gcp - image: family/elasticsearch-ubuntu-2004 - machineType: n1-standard-32 - buildDirectory: /dev/shm/bk - preemptible: true - diskSizeGb: 250 - env: - BWC_VERSION: 7.5.2 - retry: - automatic: - - exit_status: "-1" - limit: 3 - signal_reason: none - - signal_reason: agent_stop - limit: 3 - - - label: 7.6.2 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.6.2#bwcTest - timeout_in_minutes: 300 - agents: - provider: gcp - image: family/elasticsearch-ubuntu-2004 - machineType: n1-standard-32 - buildDirectory: /dev/shm/bk - preemptible: true - diskSizeGb: 250 - env: - BWC_VERSION: 7.6.2 - retry: - automatic: - - exit_status: "-1" - limit: 3 - signal_reason: none - - signal_reason: agent_stop - limit: 3 - - - label: 7.7.1 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.7.1#bwcTest - timeout_in_minutes: 300 - agents: - provider: gcp - image: family/elasticsearch-ubuntu-2004 - machineType: n1-standard-32 - buildDirectory: /dev/shm/bk - preemptible: true - diskSizeGb: 250 - env: - BWC_VERSION: 7.7.1 - retry: - automatic: - - exit_status: "-1" - limit: 3 - signal_reason: none - - signal_reason: agent_stop - limit: 3 - - - label: 7.8.1 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.8.1#bwcTest - timeout_in_minutes: 300 - agents: - provider: gcp - image: family/elasticsearch-ubuntu-2004 - machineType: n1-standard-32 - buildDirectory: /dev/shm/bk - preemptible: true - diskSizeGb: 250 - env: - BWC_VERSION: 7.8.1 - retry: - automatic: - - exit_status: "-1" - limit: 3 - signal_reason: none - - signal_reason: agent_stop - limit: 3 - - - label: 7.9.3 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.9.3#bwcTest - timeout_in_minutes: 300 - agents: - provider: gcp - image: family/elasticsearch-ubuntu-2004 - machineType: n1-standard-32 - buildDirectory: /dev/shm/bk - preemptible: true - diskSizeGb: 250 - env: - BWC_VERSION: 7.9.3 - retry: - automatic: - - exit_status: "-1" - limit: 3 - signal_reason: none - - signal_reason: agent_stop - limit: 3 - - - label: 7.10.2 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.10.2#bwcTest - timeout_in_minutes: 300 - agents: - provider: gcp - image: family/elasticsearch-ubuntu-2004 - machineType: n1-standard-32 - buildDirectory: /dev/shm/bk - preemptible: true - diskSizeGb: 250 - env: - BWC_VERSION: 7.10.2 - retry: - automatic: - - exit_status: "-1" - limit: 3 - signal_reason: none - - signal_reason: agent_stop - limit: 3 - - - label: 7.11.2 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.11.2#bwcTest - timeout_in_minutes: 300 - agents: - provider: gcp - image: family/elasticsearch-ubuntu-2004 - machineType: n1-standard-32 - buildDirectory: /dev/shm/bk - preemptible: true - diskSizeGb: 250 - env: - BWC_VERSION: 7.11.2 - retry: - automatic: - - exit_status: "-1" - limit: 3 - signal_reason: none - - signal_reason: agent_stop - limit: 3 - - - label: 7.12.1 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.12.1#bwcTest - timeout_in_minutes: 300 - agents: - provider: gcp - image: family/elasticsearch-ubuntu-2004 - machineType: n1-standard-32 - buildDirectory: /dev/shm/bk - preemptible: true - diskSizeGb: 250 - env: - BWC_VERSION: 7.12.1 - retry: - automatic: - - exit_status: "-1" - limit: 3 - signal_reason: none - - signal_reason: agent_stop - limit: 3 - - - label: 7.13.4 / bwc - command: 
.ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.13.4#bwcTest - timeout_in_minutes: 300 - agents: - provider: gcp - image: family/elasticsearch-ubuntu-2004 - machineType: n1-standard-32 - buildDirectory: /dev/shm/bk - preemptible: true - diskSizeGb: 250 - env: - BWC_VERSION: 7.13.4 - retry: - automatic: - - exit_status: "-1" - limit: 3 - signal_reason: none - - signal_reason: agent_stop - limit: 3 - - - label: 7.14.2 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.14.2#bwcTest - timeout_in_minutes: 300 - agents: - provider: gcp - image: family/elasticsearch-ubuntu-2004 - machineType: n1-standard-32 - buildDirectory: /dev/shm/bk - preemptible: true - diskSizeGb: 250 - env: - BWC_VERSION: 7.14.2 - retry: - automatic: - - exit_status: "-1" - limit: 3 - signal_reason: none - - signal_reason: agent_stop - limit: 3 - - - label: 7.15.2 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.15.2#bwcTest - timeout_in_minutes: 300 - agents: - provider: gcp - image: family/elasticsearch-ubuntu-2004 - machineType: n1-standard-32 - buildDirectory: /dev/shm/bk - preemptible: true - diskSizeGb: 250 - env: - BWC_VERSION: 7.15.2 - retry: - automatic: - - exit_status: "-1" - limit: 3 - signal_reason: none - - signal_reason: agent_stop - limit: 3 - - - label: 7.16.3 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.16.3#bwcTest - timeout_in_minutes: 300 - agents: - provider: gcp - image: family/elasticsearch-ubuntu-2004 - machineType: n1-standard-32 - buildDirectory: /dev/shm/bk - preemptible: true - diskSizeGb: 250 - env: - BWC_VERSION: 7.16.3 - retry: - automatic: - - exit_status: "-1" - limit: 3 - signal_reason: none - - signal_reason: agent_stop - limit: 3 - - - label: 7.17.25 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.17.25#bwcTest - timeout_in_minutes: 300 - agents: - provider: gcp - image: family/elasticsearch-ubuntu-2004 - machineType: n1-standard-32 - buildDirectory: /dev/shm/bk - preemptible: true - diskSizeGb: 250 - env: - BWC_VERSION: 7.17.25 - retry: - automatic: - - exit_status: "-1" - limit: 3 - signal_reason: none - - signal_reason: agent_stop - limit: 3 - - label: 8.0.1 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.0.1#bwcTest timeout_in_minutes: 300 @@ -702,6 +342,26 @@ steps: - signal_reason: agent_stop limit: 3 + - label: 9.0.0 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v9.0.0#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + diskSizeGb: 250 + env: + BWC_VERSION: 9.0.0 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + - label: concurrent-search-tests command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dtests.jvm.argline=-Des.concurrent_search=true -Des.concurrent_search=true functionalTests timeout_in_minutes: 420 @@ -771,7 +431,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk17 - BWC_VERSION: ["7.17.25", "8.15.2", "8.16.0"] + BWC_VERSION: ["8.15.2", "8.16.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -821,7 +481,7 @@ steps: - openjdk21 - openjdk22 - openjdk23 - BWC_VERSION: ["7.17.25", "8.15.2", "8.16.0"] + BWC_VERSION: ["8.15.2", "8.16.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 6c5aaa38717ef..498727b3ecd39 100644 --- 
a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -1,22 +1,4 @@ BWC_VERSION: - - "7.0.1" - - "7.1.1" - - "7.2.1" - - "7.3.2" - - "7.4.2" - - "7.5.2" - - "7.6.2" - - "7.7.1" - - "7.8.1" - - "7.9.3" - - "7.10.2" - - "7.11.2" - - "7.12.1" - - "7.13.4" - - "7.14.2" - - "7.15.2" - - "7.16.3" - - "7.17.25" - "8.0.1" - "8.1.3" - "8.2.3" @@ -34,3 +16,4 @@ BWC_VERSION: - "8.14.3" - "8.15.2" - "8.16.0" + - "9.0.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index f00be923db67c..a2f1e0c675ea5 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,4 +1,4 @@ BWC_VERSION: - - "7.17.25" - "8.15.2" - "8.16.0" + - "9.0.0" diff --git a/REST_API_COMPATIBILITY.md b/REST_API_COMPATIBILITY.md index c36b4ea9dbfb0..4a6ad4e7e17f5 100644 --- a/REST_API_COMPATIBILITY.md +++ b/REST_API_COMPATIBILITY.md @@ -158,12 +158,12 @@ The above code checks the request's compatible version and if the request has th The primary means of testing compatibility is via the prior major version's YAML REST tests. The build system will download the latest prior version of the YAML rest tests and execute them against the current cluster version. Prior to execution the tests will be transformed by injecting the correct headers to enable compatibility as well as other custom changes to the tests to allow the tests to pass. These customizations are configured via the build.gradle and happen just prior to test execution. Since the compatibility tests are manipulated version of the tests stored in Github (via the past major version), it is important to find the local (on disk) version for troubleshooting compatibility tests. -The tests are wired into the `check` task, so that is the easiest way to test locally prior to committing. More specifically the task is called `yamlRestTestV7CompatTest`, where 7 is the version of tests that are executing. For example, version 8 of the server will have a task named `yamlRestTestV7CompatTest` and version 9 of the server will have a task named `yamlRestTestV8CompatTest`. These behaves nearly identical to it's non-compat `yamlRestTest` task. The only variance is that the tests are sourced from the prior version branch and the tests go through a transformation phase before execution. The transformation task is `yamlRestTestV7CompatTransform` where the Vnumber follows the same convention as the test. +The tests are wired into the `check` task, so that is the easiest way to test locally prior to committing. More specifically, the task is called `yamlRestCompatTest`. It behaves nearly identically to its non-compat `yamlRestTest` task. The only variance is that the tests are sourced from the prior version branch and the tests go through a transformation phase before execution. The transformation task is `yamlRestCompatTestTransform`. For example: ```bash -./gradlew :rest-api-spec:yamlRestTestV7CompatTest +./gradlew :rest-api-spec:yamlRestCompatTest ``` Since these are a variation of backward compatibility testing, the entire suite of compatibility tests will be skipped anytime the backward compatibility testing is disabled. Since the source code for these tests live in a branch of code, disabling a specific test should be done via the transformation task configuration in build.gradle (i.e. `yamlRestTestV7CompatTransform`). @@ -188,7 +188,7 @@ Muting compatibility tests should be done via a test transform. A per test skip
A per test skip ```groovy -tasks.named("yamlRestTestV7CompatTransform").configure({ task -> +tasks.named("yamlRestCompatTestTransform").configure({ task -> task.skipTestsByFilePattern("**/cat*/*.yml", "Cat API are not supported") task.skipTest("bulk/10_basic/Array of objects", "Muted due failures. See #12345") }) diff --git a/branches.json b/branches.json index 1d860501cbc87..e464d6179f2ba 100644 --- a/branches.json +++ b/branches.json @@ -4,6 +4,9 @@ { "branch": "main" }, + { + "branch": "8.x" + }, { "branch": "8.15" }, diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy index fc7ccd651d73b..8c5c84a276719 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPluginFuncTest.groovy @@ -27,7 +27,7 @@ class InternalDistributionBwcSetupPluginFuncTest extends AbstractGitAwareGradleF buildFile << """ apply plugin: 'elasticsearch.internal-distribution-bwc-setup' """ - execute("git branch origin/8.0", file("cloned")) + execute("git branch origin/8.x", file("cloned")) execute("git branch origin/7.16", file("cloned")) execute("git branch origin/7.15", file("cloned")) } @@ -113,9 +113,9 @@ class InternalDistributionBwcSetupPluginFuncTest extends AbstractGitAwareGradleF result.task(":distribution:bwc:minor:buildBwcDarwinTar").outcome == TaskOutcome.SUCCESS and: "assemble task triggered" result.output.contains("[8.0.0] > Task :distribution:archives:darwin-tar:extractedAssemble") - result.output.contains("expandedRootPath /distribution/bwc/minor/build/bwc/checkout-8.0/" + + result.output.contains("expandedRootPath /distribution/bwc/minor/build/bwc/checkout-8.x/" + "distribution/archives/darwin-tar/build/install") - result.output.contains("nested folder /distribution/bwc/minor/build/bwc/checkout-8.0/" + + result.output.contains("nested folder /distribution/bwc/minor/build/bwc/checkout-8.x/" + "distribution/archives/darwin-tar/build/install/elasticsearch-8.0.0-SNAPSHOT") } diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/LegacyYamlRestCompatTestPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/LegacyYamlRestCompatTestPluginFuncTest.groovy index 737c448f23be6..3ffbd926ec847 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/LegacyYamlRestCompatTestPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/LegacyYamlRestCompatTestPluginFuncTest.groovy @@ -19,10 +19,9 @@ import org.gradle.testkit.runner.TaskOutcome class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTest { - def compatibleVersion = Version.fromString(VersionProperties.getVersions().get("elasticsearch")).getMajor() - 1 - def specIntermediateDir = "restResources/v${compatibleVersion}/yamlSpecs" - def testIntermediateDir = "restResources/v${compatibleVersion}/yamlTests" - def transformTask = ":yamlRestTestV${compatibleVersion}CompatTransform" + def specIntermediateDir = "restResources/compat/yamlSpecs" + def testIntermediateDir = "restResources/compat/yamlTests" + def transformTask = 
":yamlRestCompatTestTransform" def YAML_FACTORY = new YAMLFactory() def MAPPER = new ObjectMapper(YAML_FACTORY) def READER = MAPPER.readerFor(ObjectNode.class) @@ -36,9 +35,11 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe buildApiRestrictionsDisabled = true } - def "yamlRestTestVxCompatTest does nothing when there are no tests"() { + def "yamlRestCompatTest does nothing when there are no tests"() { given: - subProject(":distribution:bwc:maintenance") << """ + internalBuild() + + subProject(":distribution:bwc:staged") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -46,26 +47,24 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe """ buildFile << """ - plugins { - id 'elasticsearch.legacy-yaml-rest-compat-test' - } + apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' """ when: - def result = gradleRunner("yamlRestTestV${compatibleVersion}CompatTest", '--stacktrace').build() + def result = gradleRunner("yamlRestCompatTest", '--stacktrace').build() then: - result.task(":yamlRestTestV${compatibleVersion}CompatTest").outcome == TaskOutcome.NO_SOURCE + result.task(":yamlRestCompatTest").outcome == TaskOutcome.NO_SOURCE result.task(':copyRestCompatApiTask').outcome == TaskOutcome.NO_SOURCE result.task(':copyRestCompatTestTask').outcome == TaskOutcome.NO_SOURCE result.task(transformTask).outcome == TaskOutcome.NO_SOURCE } - def "yamlRestTestVxCompatTest executes and copies api and transforms tests from :bwc:maintenance"() { + def "yamlRestCompatTest executes and copies api and transforms tests from :bwc:staged"() { given: internalBuild() - subProject(":distribution:bwc:maintenance") << """ + subProject(":distribution:bwc:staged") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -90,7 +89,7 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe String wrongTest = "wrong_version.yml" String additionalTest = "additional_test.yml" setupRestResources([wrongApi], [wrongTest]) //setups up resources for current version, which should not be used for this test - String sourceSetName = "yamlRestTestV" + compatibleVersion + "Compat" + String sourceSetName = "yamlRestCompatTest" addRestTestsToProject([additionalTest], sourceSetName) //intentionally adding to yamlRestTest source set since the .classes are copied from there file("src/yamlRestTest/java/MockIT.java") << "import org.junit.Test;class MockIT { @Test public void doNothing() { }}" @@ -98,14 +97,14 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe String api = "foo.json" String test = "10_basic.yml" //add the compatible test and api files, these are the prior version's normal yaml rest tests - file("distribution/bwc/maintenance/checkoutDir/rest-api-spec/src/main/resources/rest-api-spec/api/" + api) << "" - file("distribution/bwc/maintenance/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/" + test) << "" + file("distribution/bwc/staged/checkoutDir/rest-api-spec/src/main/resources/rest-api-spec/api/" + api) << "" + file("distribution/bwc/staged/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/" + test) << "" when: - def result = gradleRunner("yamlRestTestV${compatibleVersion}CompatTest").build() + def result = gradleRunner("yamlRestCompatTest").build() then: - result.task(":yamlRestTestV${compatibleVersion}CompatTest").outcome == TaskOutcome.SKIPPED + result.task(":yamlRestCompatTest").outcome == 
TaskOutcome.SKIPPED result.task(':copyRestCompatApiTask').outcome == TaskOutcome.SUCCESS result.task(':copyRestCompatTestTask').outcome == TaskOutcome.SUCCESS result.task(transformTask).outcome == TaskOutcome.SUCCESS @@ -132,19 +131,20 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe result.task(':copyYamlTestsTask').outcome == TaskOutcome.NO_SOURCE when: - result = gradleRunner("yamlRestTestV${compatibleVersion}CompatTest").build() + result = gradleRunner("yamlRestCompatTest").build() then: - result.task(":yamlRestTestV${compatibleVersion}CompatTest").outcome == TaskOutcome.SKIPPED + result.task(":yamlRestCompatTest").outcome == TaskOutcome.SKIPPED result.task(':copyRestCompatApiTask').outcome == TaskOutcome.UP_TO_DATE result.task(':copyRestCompatTestTask').outcome == TaskOutcome.UP_TO_DATE result.task(transformTask).outcome == TaskOutcome.UP_TO_DATE } - def "yamlRestTestVxCompatTest is wired into check and checkRestCompat"() { + def "yamlRestCompatTest is wired into check and checkRestCompat"() { given: + internalBuild() withVersionCatalogue() - subProject(":distribution:bwc:maintenance") << """ + subProject(":distribution:bwc:staged") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -152,10 +152,7 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe """ buildFile << """ - plugins { - id 'elasticsearch.legacy-yaml-rest-compat-test' - } - + apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' """ when: @@ -164,7 +161,7 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe then: result.task(':check').outcome == TaskOutcome.UP_TO_DATE result.task(':checkRestCompat').outcome == TaskOutcome.UP_TO_DATE - result.task(":yamlRestTestV${compatibleVersion}CompatTest").outcome == TaskOutcome.NO_SOURCE + result.task(":yamlRestCompatTest").outcome == TaskOutcome.NO_SOURCE result.task(':copyRestCompatApiTask').outcome == TaskOutcome.NO_SOURCE result.task(':copyRestCompatTestTask').outcome == TaskOutcome.NO_SOURCE result.task(transformTask).outcome == TaskOutcome.NO_SOURCE @@ -178,7 +175,7 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe then: result.task(':check').outcome == TaskOutcome.UP_TO_DATE result.task(':checkRestCompat').outcome == TaskOutcome.UP_TO_DATE - result.task(":yamlRestTestV${compatibleVersion}CompatTest").outcome == TaskOutcome.SKIPPED + result.task(":yamlRestCompatTest").outcome == TaskOutcome.SKIPPED result.task(':copyRestCompatApiTask').outcome == TaskOutcome.SKIPPED result.task(':copyRestCompatTestTask').outcome == TaskOutcome.SKIPPED result.task(transformTask).outcome == TaskOutcome.SKIPPED @@ -188,7 +185,7 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe given: internalBuild() - subProject(":distribution:bwc:maintenance") << """ + subProject(":distribution:bwc:staged") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -204,7 +201,7 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe dependencies { yamlRestTestImplementation "junit:junit:4.12" } - tasks.named("yamlRestTestV${compatibleVersion}CompatTransform").configure({ task -> + tasks.named("yamlRestCompatTestTransform").configure({ task -> task.skipTest("test/test/two", "This is a test to skip test two") task.replaceValueInMatch("_type", "_doc") task.replaceValueInMatch("_source.values", ["z", "x", "y"], "one") @@ -232,7 +229,7 @@ 
class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe setupRestResources([], []) - file("distribution/bwc/maintenance/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/test.yml" ) << """ + file("distribution/bwc/staged/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/test.yml" ) << """ "one": - do: do_.some.key_to_replace: @@ -279,7 +276,7 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe - match: {} """.stripIndent() when: - def result = gradleRunner("yamlRestTestV${compatibleVersion}CompatTest").build() + def result = gradleRunner("yamlRestCompatTest").build() then: @@ -302,22 +299,22 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe --- one: - do: - do_.some.key_that_was_replaced: - index: "test" - id: 1 - keyvalue : replacedkeyvalue do_.some.key_to_replace_in_two: no_change_here: "because it's not in test 'two'" warnings: - "warning1" - "warning2" headers: - Content-Type: "application/vnd.elasticsearch+json;compatible-with=7" - Accept: "application/vnd.elasticsearch+json;compatible-with=7" + Content-Type: "application/vnd.elasticsearch+json;compatible-with=8" + Accept: "application/vnd.elasticsearch+json;compatible-with=8" allowed_warnings: - "added allowed warning" allowed_warnings_regex: - "added allowed warning regex .* [0-9]" + do_.some.key_that_was_replaced: + index: "test" + id: 1 + keyvalue : "replacedkeyvalue" - match: _source.values: - "z" @@ -334,13 +331,14 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe - is_false: "replaced_value" - is_true: "value_not_to_replace" - is_false: "value_not_to_replace" - - length: { key.in_length_that_was_replaced: 1 } - - length: { value_to_replace: 99 } + - length: + key.in_length_that_was_replaced: 1 + - length: + value_to_replace: 99 - match: _source.added: name: "jake" likes: "cheese" - --- two: - skip: @@ -349,17 +347,17 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe get: index: "test2" id: 1 - do_.some.key_that_was_replaced_in_two: - changed_here: "because it is in test 'two'" headers: - Content-Type: "application/vnd.elasticsearch+json;compatible-with=7" - Accept: "application/vnd.elasticsearch+json;compatible-with=7" + Content-Type: "application/vnd.elasticsearch+json;compatible-with=8" + Accept: "application/vnd.elasticsearch+json;compatible-with=8" warnings_regex: - "regex warning here .* [a-z]" allowed_warnings: - "added allowed warning" allowed_warnings_regex: - "added allowed warning regex .* [0-9]" + do_.some.key_that_was_replaced_in_two: + changed_here: "because it is in test 'two'" - match: _source.values: - "foo" @@ -371,12 +369,12 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe - is_false: "replaced_value" - is_true: "value_not_to_replace" - is_false: "value_not_to_replace" - - length: { value_not_to_replace: 1 } + - length: + value_not_to_replace: 1 --- "use cat with no header": - do: - cat.indices: - {} + cat.indices: {} allowed_warnings: - "added allowed warning" allowed_warnings_regex: @@ -384,7 +382,7 @@ class LegacyYamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTe - match: {} """.stripIndent()).readAll() - expectedAll.eachWithIndex{ ObjectNode expected, int i -> + expectedAll.eachWithIndex { ObjectNode expected, int i -> if(expected != actual.get(i)) { println("\nTransformed Test:") SequenceWriter sequenceWriter = WRITER.writeValues(System.out) diff --git 
a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java index 41bfddb01e665..720c159f75552 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java @@ -7,8 +7,6 @@ */ package org.elasticsearch.gradle.internal; -import org.elasticsearch.gradle.Architecture; -import org.elasticsearch.gradle.ElasticsearchDistribution; import org.elasticsearch.gradle.Version; import org.elasticsearch.gradle.VersionProperties; @@ -27,7 +25,6 @@ import java.util.function.Predicate; import java.util.regex.Matcher; import java.util.regex.Pattern; -import java.util.stream.Collectors; import static java.util.Collections.unmodifiableList; @@ -67,7 +64,6 @@ public class BwcVersions { private static final Pattern LINE_PATTERN = Pattern.compile( "\\W+public static final Version V_(\\d+)_(\\d+)_(\\d+)(_alpha\\d+|_beta\\d+|_rc\\d+)?.*\\);" ); - private static final Version MINIMUM_WIRE_COMPATIBLE_VERSION = Version.fromString("7.17.0"); private static final String GLIBC_VERSION_ENV_VAR = "GLIBC_VERSION"; private final Version currentVersion; @@ -124,9 +120,7 @@ public UnreleasedVersionInfo unreleasedInfo(Version version) { } public void forPreviousUnreleased(Consumer consumer) { - filterSupportedVersions( - getUnreleased().stream().filter(version -> version.equals(currentVersion) == false).collect(Collectors.toList()) - ).stream().map(unreleased::get).forEach(consumer); + getUnreleased().stream().filter(version -> version.equals(currentVersion) == false).map(unreleased::get).forEach(consumer); } private String getBranchFor(Version version) { @@ -155,6 +149,7 @@ private Map computeUnreleased() { List unreleasedList = unreleased.stream().sorted(Comparator.reverseOrder()).toList(); Map result = new TreeMap<>(); + boolean newMinor = false; for (int i = 0; i < unreleasedList.size(); i++) { Version esVersion = unreleasedList.get(i); // This is either a new minor or staged release @@ -162,11 +157,17 @@ private Map computeUnreleased() { result.put(esVersion, new UnreleasedVersionInfo(esVersion, getBranchFor(esVersion), ":distribution")); } else if (esVersion.getRevision() == 0) { // If there are two upcoming unreleased minors then this one is the new minor - if (unreleasedList.get(i + 1).getRevision() == 0) { - result.put(esVersion, new UnreleasedVersionInfo(esVersion, getBranchFor(esVersion), ":distribution:bwc:minor")); - } else { - result.put(esVersion, new UnreleasedVersionInfo(esVersion, getBranchFor(esVersion), ":distribution:bwc:staged")); - } + if (newMinor == false && unreleasedList.get(i + 1).getRevision() == 0) { + result.put(esVersion, new UnreleasedVersionInfo(esVersion, esVersion.getMajor() + ".x", ":distribution:bwc:minor")); + newMinor = true; + } else if (newMinor == false + && unreleasedList.stream().filter(v -> v.getMajor() == esVersion.getMajor() && v.getRevision() == 0).count() == 1) { + // This is the only unreleased new minor which means we've not yet staged it for release + result.put(esVersion, new UnreleasedVersionInfo(esVersion, esVersion.getMajor() + ".x", ":distribution:bwc:minor")); + newMinor = true; + } else { + result.put(esVersion, new UnreleasedVersionInfo(esVersion, getBranchFor(esVersion), ":distribution:bwc:staged")); + } } else { // If this is the oldest unreleased version and we have a maintenance release if (i == unreleasedList.size() - 1 && 
hasMaintenanceRelease) { @@ -226,16 +227,9 @@ private List getReleased() { } /** - * Return versions of Elasticsearch which are index compatible with the current version, and also work on the local machine. + * Return versions of Elasticsearch which are index compatible with the current version. */ public List getIndexCompatible() { - return filterSupportedVersions(getAllIndexCompatible()); - } - - /** - * Return all versions of Elasticsearch which are index compatible with the current version. - */ - public List getAllIndexCompatible() { return versions.stream().filter(v -> v.getMajor() >= (currentVersion.getMajor() - 1)).toList(); } @@ -248,7 +242,7 @@ public void withIndexCompatible(Predicate filter, BiConsumer getWireCompatible() { - return filterSupportedVersions(versions.stream().filter(v -> v.compareTo(MINIMUM_WIRE_COMPATIBLE_VERSION) >= 0).toList()); + return versions.stream().filter(v -> v.compareTo(getMinimumWireCompatibleVersion()) >= 0).toList(); } public void withWireCompatible(BiConsumer versionAction) { @@ -259,20 +253,6 @@ public void withWireCompatible(Predicate filter, BiConsumer versionAction.accept(v, "v" + v.toString())); } - private List filterSupportedVersions(List wireCompat) { - Predicate supported = v -> true; - if (Architecture.current() == Architecture.AARCH64) { - final String version; - if (ElasticsearchDistribution.CURRENT_PLATFORM.equals(ElasticsearchDistribution.Platform.DARWIN)) { - version = "7.16.0"; - } else { - version = "7.12.0"; // linux shipped earlier for aarch64 - } - supported = v -> v.onOrAfter(version); - } - return wireCompat.stream().filter(supported).collect(Collectors.toList()); - } - public List getUnreleasedIndexCompatible() { List unreleasedIndexCompatible = new ArrayList<>(getIndexCompatible()); unreleasedIndexCompatible.retainAll(getUnreleased()); @@ -286,7 +266,17 @@ public List getUnreleasedWireCompatible() { } public Version getMinimumWireCompatibleVersion() { - return MINIMUM_WIRE_COMPATIBLE_VERSION; + // Determine minimum wire compatible version from list of known versions. + // Current BWC policy states the minimum wire compatible version is the last minor release of the previous major version.
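+ // For example, with 8.1.0 as the current version this resolves to 7.17.0, the last 7.x minor (see BwcVersionsSpec).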
+ return versions.stream() + .filter(v -> v.getRevision() == 0) + .filter(v -> v.getMajor() == currentVersion.getMajor() - 1) + .max(Comparator.naturalOrder()) + .orElseThrow(() -> new IllegalStateException("Unable to determine minimum wire compatible version.")); + } + + public Version getCurrentVersion() { + return currentVersion; } public record UnreleasedVersionInfo(Version version, String branch, String gradleProjectPath) {} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java index 77af3445f530c..a170606800f39 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/RestTestBasePlugin.java @@ -78,6 +78,7 @@ public class RestTestBasePlugin implements Plugin { private static final String FEATURES_METADATA_CONFIGURATION = "featuresMetadataDeps"; private static final String DEFAULT_DISTRO_FEATURES_METADATA_CONFIGURATION = "defaultDistrofeaturesMetadataDeps"; private static final String TESTS_FEATURES_METADATA_PATH = "tests.features.metadata.path"; + private static final String MINIMUM_WIRE_COMPATIBLE_VERSION_SYSPROP = "tests.minimum.wire.compatible"; private final ProviderFactory providerFactory; @@ -173,6 +174,9 @@ public void apply(Project project) { task.systemProperty("tests.security.manager", "false"); task.systemProperty("tests.system_call_filter", "false"); + // Pass minimum wire compatible version which is used by upgrade tests + task.systemProperty(MINIMUM_WIRE_COMPATIBLE_VERSION_SYSPROP, BuildParams.getBwcVersions().getMinimumWireCompatibleVersion()); + // Register plugins and modules as task inputs and pass paths as system properties to tests var modulePath = project.getObjects().fileCollection().from(modulesConfiguration); nonInputSystemProperties.systemProperty(TESTS_CLUSTER_MODULES_PATH_SYSPROP, modulePath::getAsPath); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java index e0581ebf67081..fd1446b5ff211 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java @@ -9,8 +9,8 @@ package org.elasticsearch.gradle.internal.test.rest.compat.compat; import org.elasticsearch.gradle.Version; -import org.elasticsearch.gradle.VersionProperties; import org.elasticsearch.gradle.internal.ElasticsearchJavaBasePlugin; +import org.elasticsearch.gradle.internal.info.BuildParams; import org.elasticsearch.gradle.internal.test.rest.CopyRestApiTask; import org.elasticsearch.gradle.internal.test.rest.CopyRestTestsTask; import org.elasticsearch.gradle.internal.test.rest.LegacyYamlRestTestPlugin; @@ -40,6 +40,7 @@ import java.io.File; import java.nio.file.Path; import java.util.Arrays; +import java.util.Comparator; import java.util.Map; import javax.inject.Inject; @@ -60,8 +61,7 @@ public abstract class AbstractYamlRestCompatTestPlugin implements Plugin v.getMajor() == currentMajor - 1) + .min(Comparator.reverseOrder()) + .get(); + String 
lastMinorProjectPath = BuildParams.getBwcVersions().unreleasedInfo(lastMinor).gradleProjectPath(); + // copy compatible rest specs Configuration bwcMinorConfig = project.getConfigurations().create(BWC_MINOR_CONFIG_NAME); - Dependency bwcMinor = project.getDependencies() - .project(Map.of("path", ":distribution:bwc:maintenance", "configuration", "checkout")); + Dependency bwcMinor = project.getDependencies().project(Map.of("path", lastMinorProjectPath, "configuration", "checkout")); project.getDependencies().add(bwcMinorConfig.getName(), bwcMinor); String projectPath = project.getPath(); @@ -183,7 +192,7 @@ public void apply(Project project) { // transform the copied tests task TaskProvider transformCompatTestTask = project.getTasks() - .register("yamlRestTestV" + COMPATIBLE_VERSION + "CompatTransform", RestCompatTestTransformTask.class, task -> { + .register("yamlRestCompatTestTransform", RestCompatTestTransformTask.class, task -> { task.getSourceDirectory().set(copyCompatYamlTestTask.flatMap(CopyRestTestsTask::getOutputResourceDir)); task.getOutputDirectory() .set(project.getLayout().getBuildDirectory().dir(compatTestsDir.resolve("transformed").toString())); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/LegacyYamlRestCompatTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/LegacyYamlRestCompatTestPlugin.java index e84c84cc426a3..0bff8d65586d3 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/LegacyYamlRestCompatTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/LegacyYamlRestCompatTestPlugin.java @@ -34,7 +34,7 @@ public LegacyYamlRestCompatTestPlugin(ProjectLayout projectLayout, FileOperation @Override public TaskProvider registerTestTask(Project project, SourceSet sourceSet) { - return RestTestUtil.registerTestTask(project, sourceSet, sourceSet.getTaskName(null, "test")); + return RestTestUtil.registerTestTask(project, sourceSet, sourceSet.getName()); } @Override diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/YamlRestCompatTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/YamlRestCompatTestPlugin.java index 79588ca722ff1..b376284761ff0 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/YamlRestCompatTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/YamlRestCompatTestPlugin.java @@ -32,7 +32,7 @@ public YamlRestCompatTestPlugin(ProjectLayout projectLayout, FileOperations file @Override public TaskProvider registerTestTask(Project project, SourceSet sourceSet) { - return RestTestUtil.registerTestTask(project, sourceSet, sourceSet.getTaskName(null, "test"), StandaloneRestIntegTestTask.class); + return RestTestUtil.registerTestTask(project, sourceSet, sourceSet.getName(), StandaloneRestIntegTestTask.class); } @Override diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy index 39a9af38e6a9c..8fa1ac9ea2094 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy +++ 
b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/BwcVersionsSpec.groovy @@ -10,12 +10,9 @@ package org.elasticsearch.gradle.internal import spock.lang.Specification -import org.elasticsearch.gradle.Architecture -import org.elasticsearch.gradle.ElasticsearchDistribution import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.BwcVersions.UnreleasedVersionInfo - class BwcVersionsSpec extends Specification { List versionLines = [] @@ -42,11 +39,12 @@ class BwcVersionsSpec extends Specification { unreleased == [ (v('7.16.2')): new UnreleasedVersionInfo(v('7.16.2'), '7.16', ':distribution:bwc:bugfix'), (v('7.17.0')): new UnreleasedVersionInfo(v('7.17.0'), '7.17', ':distribution:bwc:staged'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), '8.0', ':distribution:bwc:minor'), + (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), '8.x', ':distribution:bwc:minor'), (v('8.1.0')): new UnreleasedVersionInfo(v('8.1.0'), 'main', ':distribution') ] bwc.wireCompatible == [v('7.17.0'), v('8.0.0'), v('8.1.0')] - bwc.indexCompatible == osFiltered([v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.16.2'), v('7.17.0'), v('8.0.0'), v('8.1.0')]) + bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.16.2'), v('7.17.0'), v('8.0.0'), v('8.1.0')] + bwc.minimumWireCompatibleVersion == v('7.17.0') } def "current version is next minor with next major and last minor both staged"() { @@ -71,11 +69,11 @@ class BwcVersionsSpec extends Specification { unreleased == [ (v('7.16.1')): new UnreleasedVersionInfo(v('7.16.1'), '7.16', ':distribution:bwc:bugfix'), (v('7.17.0')): new UnreleasedVersionInfo(v('7.17.0'), '7.17', ':distribution:bwc:staged'), - (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), '8.0', ':distribution:bwc:minor'), + (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), '8.x', ':distribution:bwc:minor'), (v('8.1.0')): new UnreleasedVersionInfo(v('8.1.0'), 'main', ':distribution') ] bwc.wireCompatible == [v('7.17.0'), v('8.0.0'), v('8.1.0')] - bwc.indexCompatible == osFiltered([v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('8.0.0'), v('8.1.0')]) + bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('8.0.0'), v('8.1.0')] } def "current is next minor with upcoming minor staged"() { @@ -104,7 +102,7 @@ class BwcVersionsSpec extends Specification { (v('8.1.0')): new UnreleasedVersionInfo(v('8.1.0'), 'main', ':distribution') ] bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.1.0')] - bwc.indexCompatible == osFiltered([v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.1.0')]) + bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.1.0')] } def "current version is staged major"() { @@ -131,7 +129,61 @@ class BwcVersionsSpec extends Specification { (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), 'main', ':distribution'), ] bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.0.0')] - bwc.indexCompatible == osFiltered([v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), 
v('7.17.1'), v('8.0.0')]) + bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0')] + } + + def "current version is major with unreleased next minor"() { + given: + addVersion('7.14.0', '8.9.0') + addVersion('7.14.1', '8.9.0') + addVersion('7.14.2', '8.9.0') + addVersion('7.15.0', '8.9.0') + addVersion('7.15.1', '8.9.0') + addVersion('7.15.2', '8.9.0') + addVersion('7.16.0', '8.10.0') + addVersion('7.16.1', '8.10.0') + addVersion('7.17.0', '8.10.0') + addVersion('8.0.0', '9.0.0') + + when: + def bwc = new BwcVersions(versionLines, v('8.0.0')) + def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } + + then: + unreleased == [ + (v('7.16.1')): new UnreleasedVersionInfo(v('7.16.1'), '7.16', ':distribution:bwc:bugfix'), + (v('7.17.0')): new UnreleasedVersionInfo(v('7.17.0'), '7.x', ':distribution:bwc:minor'), + (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), 'main', ':distribution'), + ] + bwc.wireCompatible == [v('7.17.0'), v('8.0.0')] + bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('8.0.0')] + } + + def "current version is major with staged next minor"() { + given: + addVersion('7.14.0', '8.9.0') + addVersion('7.14.1', '8.9.0') + addVersion('7.14.2', '8.9.0') + addVersion('7.15.0', '8.9.0') + addVersion('7.15.1', '8.9.0') + addVersion('7.15.2', '8.9.0') + addVersion('7.16.0', '8.10.0') + addVersion('7.17.0', '8.10.0') + addVersion('8.0.0', '9.0.0') + + when: + def bwc = new BwcVersions(versionLines, v('8.0.0')) + def unreleased = bwc.unreleased.collectEntries { [it, bwc.unreleasedInfo(it)] } + + then: + unreleased == [ + (v('7.15.2')): new UnreleasedVersionInfo(v('7.15.2'), '7.15', ':distribution:bwc:bugfix'), + (v('7.16.0')): new UnreleasedVersionInfo(v('7.16.0'), '7.16', ':distribution:bwc:staged'), + (v('7.17.0')): new UnreleasedVersionInfo(v('7.17.0'), '7.x', ':distribution:bwc:minor'), + (v('8.0.0')): new UnreleasedVersionInfo(v('8.0.0'), 'main', ':distribution'), + ] + bwc.wireCompatible == [v('7.17.0'), v('8.0.0')] + bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.17.0'), v('8.0.0')] } def "current version is next bugfix"() { @@ -159,7 +211,7 @@ class BwcVersionsSpec extends Specification { (v('8.0.1')): new UnreleasedVersionInfo(v('8.0.1'), 'main', ':distribution'), ] bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1')] - bwc.indexCompatible == osFiltered([v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1')]) + bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1')] } def "current version is next minor with no staged releases"() { @@ -189,7 +241,7 @@ class BwcVersionsSpec extends Specification { (v('8.1.0')): new UnreleasedVersionInfo(v('8.1.0'), 'main', ':distribution') ] bwc.wireCompatible == [v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1'), v('8.1.0')] - bwc.indexCompatible == osFiltered([v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1'), v('8.1.0')]) + bwc.indexCompatible == [v('7.14.0'), v('7.14.1'), v('7.14.2'), v('7.15.0'), 
v('7.15.1'), v('7.15.2'), v('7.16.0'), v('7.16.1'), v('7.17.0'), v('7.17.1'), v('8.0.0'), v('8.0.1'), v('8.1.0')]
     }
 
     private void addVersion(String elasticsearch, String lucene) {
@@ -202,12 +254,4 @@ class BwcVersionsSpec extends Specification {
         return Version.fromString(version)
     }
 
-    private boolean osxAarch64() {
-        Architecture.current() == Architecture.AARCH64 &&
-        ElasticsearchDistribution.CURRENT_PLATFORM.equals(ElasticsearchDistribution.Platform.DARWIN)
-    }
-
-    private List osFiltered(ArrayList versions) {
-        return osxAarch64() ? versions.findAll {it.onOrAfter("7.16.0")} : versions
-    }
 }
diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties
index 1dd9fb95bd17b..edb97a2968bc8 100644
--- a/build-tools-internal/version.properties
+++ b/build-tools-internal/version.properties
@@ -1,4 +1,4 @@
-elasticsearch = 8.16.0
+elasticsearch = 9.0.0
 lucene = 9.11.1
 
 bundled_jdk_vendor = openjdk
diff --git a/build.gradle b/build.gradle
index 01fdace570ce0..8430ac335d447 100644
--- a/build.gradle
+++ b/build.gradle
@@ -135,7 +135,7 @@ tasks.register("updateCIBwcVersions") {
   }
   doLast {
-    writeVersions(file(".ci/bwcVersions"), filterIntermediatePatches(BuildParams.bwcVersions.allIndexCompatible))
+    writeVersions(file(".ci/bwcVersions"), filterIntermediatePatches(BuildParams.bwcVersions.indexCompatible))
     writeVersions(file(".ci/snapshotBwcVersions"), filterIntermediatePatches(BuildParams.bwcVersions.unreleasedIndexCompatible))
     expandBwcList(
       ".buildkite/pipelines/intake.yml",
@@ -149,7 +149,7 @@ tasks.register("updateCIBwcVersions") {
         new ListExpansion(versions: filterIntermediatePatches(BuildParams.bwcVersions.unreleasedIndexCompatible), variable: "BWC_LIST"),
       ],
       [
-        new StepExpansion(templatePath: ".buildkite/pipelines/periodic.bwc.template.yml", versions: filterIntermediatePatches(BuildParams.bwcVersions.allIndexCompatible), variable: "BWC_STEPS"),
+        new StepExpansion(templatePath: ".buildkite/pipelines/periodic.bwc.template.yml", versions: filterIntermediatePatches(BuildParams.bwcVersions.indexCompatible), variable: "BWC_STEPS"),
       ]
     )
@@ -157,7 +157,7 @@
       ".buildkite/pipelines/periodic-packaging.yml",
       ".buildkite/pipelines/periodic-packaging.template.yml",
       ".buildkite/pipelines/periodic-packaging.bwc.template.yml",
-      filterIntermediatePatches(BuildParams.bwcVersions.allIndexCompatible)
+      filterIntermediatePatches(BuildParams.bwcVersions.indexCompatible)
     )
   }
 }
@@ -186,7 +186,7 @@ tasks.register("verifyVersions") {
         .collect { Version.fromString(it) }
     )
   }
-  verifyCiYaml(file(".ci/bwcVersions"), filterIntermediatePatches(BuildParams.bwcVersions.allIndexCompatible))
+  verifyCiYaml(file(".ci/bwcVersions"), filterIntermediatePatches(BuildParams.bwcVersions.indexCompatible))
   verifyCiYaml(file(".ci/snapshotBwcVersions"), BuildParams.bwcVersions.unreleasedIndexCompatible)
   // Make sure backport bot config file is up to date
diff --git a/libs/core/src/main/java/org/elasticsearch/core/RestApiVersion.java b/libs/core/src/main/java/org/elasticsearch/core/RestApiVersion.java
index 74acb00925e5a..e0c18f35f6cb0 100644
--- a/libs/core/src/main/java/org/elasticsearch/core/RestApiVersion.java
+++ b/libs/core/src/main/java/org/elasticsearch/core/RestApiVersion.java
@@ -17,13 +17,16 @@
  */
 public enum RestApiVersion {
 
+    V_9(9),
+
     V_8(8),
 
-    @UpdateForV9 // v9 will not need to support the v7 REST API
     V_7(7);
 
     public final byte major;
 
+    @UpdateForV9
+    // We need to bump current and previous to V_9 and V_8, respectively
     private static final RestApiVersion CURRENT = V_8;
     private static final RestApiVersion PREVIOUS = V_7;
@@ -49,6 +52,7 @@ public static RestApiVersion minimumSupported() {
 
     public static Predicate<RestApiVersion> equalTo(RestApiVersion restApiVersion) {
         return switch (restApiVersion) {
+            case V_9 -> r -> r.major == V_9.major;
             case V_8 -> r -> r.major == V_8.major;
             case V_7 -> r -> r.major == V_7.major;
         };
@@ -56,11 +60,14 @@ public static Predicate<RestApiVersion> equalTo(RestApiVersion restApiVersion) {
 
     public static Predicate<RestApiVersion> onOrAfter(RestApiVersion restApiVersion) {
         return switch (restApiVersion) {
+            case V_9 -> r -> r.major >= V_9.major;
             case V_8 -> r -> r.major >= V_8.major;
             case V_7 -> r -> r.major >= V_7.major;
         };
     }
 
+    @UpdateForV9
+    // Right now we return api version 8 for major version 9 until we bump the api version above
     public static RestApiVersion forMajor(int major) {
         switch (major) {
             case 7 -> {
@@ -69,6 +76,9 @@ public static RestApiVersion forMajor(int major) {
             case 8 -> {
                 return V_8;
             }
+            case 9 -> {
+                return V_8;
+            }
             default -> throw new IllegalArgumentException("Unknown REST API version " + major);
         }
     }
diff --git a/modules/aggregations/build.gradle b/modules/aggregations/build.gradle
index 91f3303d9d4a8..5e233f423aa14 100644
--- a/modules/aggregations/build.gradle
+++ b/modules/aggregations/build.gradle
@@ -36,30 +36,6 @@ if (BuildParams.isSnapshotBuild() == false) {
   }
 }
 
-tasks.named("yamlRestTestV7CompatTransform").configure { task ->
-  task.skipTest("search.aggregation/20_terms/string profiler via global ordinals filters implementation", "The profiler results aren't backwards compatible.")
-  task.skipTest("search.aggregation/20_terms/string profiler via global ordinals native implementation", "The profiler results aren't backwards compatible.")
-  task.skipTest("search.aggregation/20_terms/string profiler via map", "The profiler results aren't backwards compatible.")
-  task.skipTest("search.aggregation/20_terms/numeric profiler", "The profiler results aren't backwards compatible.")
-  task.skipTest("search.aggregation/210_top_hits_nested_metric/top_hits aggregation with sequence numbers", "#42809 the use nested path and filter sort throws an exception")
-  task.skipTest("search.aggregation/370_doc_count_field/Test filters agg with doc_count", "Uses profiler for assertions which is not backwards compatible")
-
-  // In 8.9.0, the default t-digest algorithm changed from AVL-tree-based to hybrid, combining a sorted array of samples with a merging
-  // implementation. This change leads to slight different percentile results, compared to previous versions.
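That t-digest note explains the percentile skips that follow: a digest built on raw sorted samples can answer small percentile queries exactly, while one that merges samples into centroids interpolates from centroid means, so the same query can land on a slightly different value. A minimal self-contained sketch of the effect, with a toy pairwise merge standing in for the real TDigestState logic (class and method names here are illustrative only):

import java.util.Arrays;

// Toy contrast between the two summaries named in the comment above: exact
// percentiles read off a sorted sample array vs. an estimate interpolated from
// merged centroids. Not the real t-digest; it only shows why the two
// strategies can disagree slightly on identical data.
public class PercentileSketch {

    // Exact: nearest-rank percentile over the raw sorted samples.
    static double exact(double[] sorted, double p) {
        int rank = (int) Math.ceil(p / 100.0 * sorted.length) - 1;
        return sorted[Math.max(rank, 0)];
    }

    // Merged: collapse neighbouring pairs into (mean, weight) centroids, then
    // answer from centroid means - the detail inside each centroid is gone.
    static double merged(double[] sorted, double p) {
        int n = (sorted.length + 1) / 2;
        double[] means = new double[n];
        double[] weights = new double[n];
        for (int i = 0; i < sorted.length; i++) {
            means[i / 2] += sorted[i];
            weights[i / 2]++;
        }
        for (int i = 0; i < n; i++) {
            means[i] /= weights[i];
        }
        double target = p / 100.0 * sorted.length;
        double seen = 0;
        for (int i = 0; i < n; i++) {
            seen += weights[i];
            if (seen >= target) {
                return means[i];
            }
        }
        return means[n - 1];
    }

    public static void main(String[] args) {
        double[] samples = { 1, 2, 3, 4, 10 };
        Arrays.sort(samples);
        System.out.println(exact(samples, 50));   // 3.0 from the raw samples
        System.out.println(merged(samples, 50));  // 3.5 from the centroids
    }
}

Both answers are defensible estimates from different summaries, which is why the affected YAML assertions below were skipped rather than loosened.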
- task.skipTest("search.aggregation/180_percentiles_tdigest_metric/Basic test", "Hybrid t-digest produces different results.") - task.skipTest("search.aggregation/180_percentiles_tdigest_metric/Non-keyed test", "Hybrid t-digest produces different results.") - task.skipTest("search.aggregation/180_percentiles_tdigest_metric/Only aggs test", "Hybrid t-digest produces different results.") - task.skipTest("search.aggregation/180_percentiles_tdigest_metric/Explicit Percents test", "Hybrid t-digest produces different results.") - task.skipTest("search.aggregation/180_percentiles_tdigest_metric/Metadata test", "Hybrid t-digest produces different results.") - task.skipTest("search.aggregation/180_percentiles_tdigest_metric/Filtered test", "Hybrid t-digest produces different results.") - task.skipTest("search.aggregation/420_percentile_ranks_tdigest_metric/filtered", "Hybrid t-digest produces different results.") - - // Something has changed with response codes - task.skipTest("search.aggregation/20_terms/IP test", "Hybrid t-digest produces different results.") - - task.addAllowedWarningRegex("\\[types removal\\].*") -} - artifacts { restTests(new File(projectDir, "src/yamlRestTest/resources/rest-api-spec/test")) } diff --git a/modules/analysis-common/build.gradle b/modules/analysis-common/build.gradle index 1fc42a1b294fe..b43124f52552b 100644 --- a/modules/analysis-common/build.gradle +++ b/modules/analysis-common/build.gradle @@ -29,13 +29,6 @@ dependencies { clusterModules project(':modules:mapper-extras') } -tasks.named("yamlRestTestV7CompatTransform").configure { task -> - task.skipTest("indices.analyze/10_analyze/htmlStrip_deprecated", "Cleanup versioned deprecations in analysis #41560") - task.skipTest("analysis-common/40_token_filters/delimited_payload_filter_error", "Remove preconfigured delimited_payload_filter #43686") - task.skipTest("analysis-common/20_analyzers/standard_html_strip", "Cleanup versioned deprecations in analysis #41560") - task.skipTest("search.query/50_queries_with_synonyms/Test common terms query with stacked tokens", "#42654 - `common` query throws an exception") -} - artifacts { restTests(new File(projectDir, "src/yamlRestTest/resources/rest-api-spec/test")) } diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java index c18cb3dddf0ae..4c8e88a0cedbf 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java @@ -52,25 +52,6 @@ public void testNGramFilterInCustomAnalyzerDeprecationError() throws IOException ex.getMessage() ); } - - final Settings settingsPre7 = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .put( - IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_6_0) - ) - .put("index.analysis.analyzer.custom_analyzer.type", "custom") - .put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard") - .putList("index.analysis.analyzer.custom_analyzer.filter", "my_ngram") - .put("index.analysis.filter.my_ngram.type", "nGram") - .build(); - try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) { - createTestAnalysis(IndexSettingsModule.newIndexSettings("index", settingsPre7), settingsPre7, 
commonAnalysisPlugin); - assertWarnings( - "The [nGram] token filter name is deprecated and will be removed in a future version. " - + "Please change the filter name to [ngram] instead." - ); - } } /** @@ -101,26 +82,6 @@ public void testEdgeNGramFilterInCustomAnalyzerDeprecationError() throws IOExcep ex.getMessage() ); } - - final Settings settingsPre7 = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .put( - IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_6_0) - ) - .put("index.analysis.analyzer.custom_analyzer.type", "custom") - .put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard") - .putList("index.analysis.analyzer.custom_analyzer.filter", "my_ngram") - .put("index.analysis.filter.my_ngram.type", "edgeNGram") - .build(); - - try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) { - createTestAnalysis(IndexSettingsModule.newIndexSettings("index", settingsPre7), settingsPre7, commonAnalysisPlugin); - assertWarnings( - "The [edgeNGram] token filter name is deprecated and will be removed in a future version. " - + "Please change the filter name to [edge_ngram] instead." - ); - } } /** @@ -128,39 +89,6 @@ public void testEdgeNGramFilterInCustomAnalyzerDeprecationError() throws IOExcep * disallow usages for indices created after 8.0 */ public void testNGramTokenizerDeprecation() throws IOException { - // tests for prebuilt tokenizer - doTestPrebuiltTokenizerDeprecation( - "nGram", - "ngram", - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2), - false - ); - doTestPrebuiltTokenizerDeprecation( - "edgeNGram", - "edge_ngram", - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2), - false - ); - doTestPrebuiltTokenizerDeprecation( - "nGram", - "ngram", - IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_6_0, - IndexVersion.max(IndexVersions.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0)) - ), - true - ); - doTestPrebuiltTokenizerDeprecation( - "edgeNGram", - "edge_ngram", - IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_6_0, - IndexVersion.max(IndexVersions.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0)) - ), - true - ); expectThrows( IllegalArgumentException.class, () -> doTestPrebuiltTokenizerDeprecation( @@ -179,40 +107,6 @@ public void testNGramTokenizerDeprecation() throws IOException { true ) ); - - // same batch of tests for custom tokenizer definition in the settings - doTestCustomTokenizerDeprecation( - "nGram", - "ngram", - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2), - false - ); - doTestCustomTokenizerDeprecation( - "edgeNGram", - "edge_ngram", - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2), - false - ); - doTestCustomTokenizerDeprecation( - "nGram", - "ngram", - IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_6_0, - IndexVersion.max(IndexVersions.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0)) - ), - true - ); - doTestCustomTokenizerDeprecation( - "edgeNGram", - "edge_ngram", - IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_6_0, - IndexVersion.max(IndexVersions.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0)) - ), - true - ); expectThrows( 
IllegalArgumentException.class, () -> doTestCustomTokenizerDeprecation( diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java index 412e3ba3e380a..48bc60b5ad0b4 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java @@ -17,14 +17,12 @@ import org.elasticsearch.index.IndexService.IndexCreationContext; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugins.scanners.StablePluginsRegistry; import org.elasticsearch.test.ESTokenStreamTestCase; import org.elasticsearch.test.IndexSettingsModule; -import org.elasticsearch.test.index.IndexVersionUtils; import java.io.IOException; import java.io.StringReader; @@ -47,61 +45,10 @@ private IndexAnalyzers buildAnalyzers(IndexVersion version, String tokenizer) th } public void testPreConfiguredTokenizer() throws IOException { - - // Before 7.3 we return ngrams of length 1 only - { - IndexVersion version = IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersions.V_7_3_0) - ); - try (IndexAnalyzers indexAnalyzers = buildAnalyzers(version, "edge_ngram")) { - NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); - assertNotNull(analyzer); - assertAnalyzesTo(analyzer, "test", new String[] { "t" }); - } - } - - // Check deprecated name as well - { - IndexVersion version = IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersions.V_7_3_0) - ); - try (IndexAnalyzers indexAnalyzers = buildAnalyzers(version, "edgeNGram")) { - NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); - assertNotNull(analyzer); - assertAnalyzesTo(analyzer, "test", new String[] { "t" }); - } - } - - // Afterwards, we return ngrams of length 1 and 2, to match the default factory settings - { - try (IndexAnalyzers indexAnalyzers = buildAnalyzers(IndexVersion.current(), "edge_ngram")) { - NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); - assertNotNull(analyzer); - assertAnalyzesTo(analyzer, "test", new String[] { "t", "te" }); - } - } - - // Check deprecated name as well, needs version before 8.0 because throws IAE after that - { - try ( - IndexAnalyzers indexAnalyzers = buildAnalyzers( - IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_3_0, - IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) - ), - "edgeNGram" - ) - ) { - NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); - assertNotNull(analyzer); - assertAnalyzesTo(analyzer, "test", new String[] { "t", "te" }); - - } + try (IndexAnalyzers indexAnalyzers = buildAnalyzers(IndexVersion.current(), "edge_ngram")) { + NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); + assertNotNull(analyzer); + assertAnalyzesTo(analyzer, "test", new String[] { "t", "te" }); } } diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java 
b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java index 7a2bd2a822988..16288c754e922 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SynonymsAnalysisTests.java @@ -337,7 +337,7 @@ public void testShingleFilters() { Settings settings = Settings.builder() .put( IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersion.current()) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()) ) .put("path.home", createTempDir().toString()) .put("index.analysis.filter.synonyms.type", "synonym") @@ -391,7 +391,7 @@ public void testPreconfiguredTokenFilters() throws IOException { Settings settings = Settings.builder() .put( IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersion.current()) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()) ) .put("path.home", createTempDir().toString()) .build(); @@ -423,7 +423,7 @@ public void testDisallowedTokenFilters() throws IOException { Settings settings = Settings.builder() .put( IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersion.current()) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()) ) .put("path.home", createTempDir().toString()) .putList("common_words", "a", "b") diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java index 68e6d6661f944..39fda06363033 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/WordDelimiterGraphTokenFilterFactoryTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.index.IndexService.IndexCreationContext; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalysisTestsHelper; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -25,7 +24,6 @@ import org.elasticsearch.plugins.scanners.StablePluginsRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; -import org.elasticsearch.test.index.IndexVersionUtils; import java.io.IOException; import java.io.StringReader; @@ -180,61 +178,26 @@ public void testIgnoreKeywords() throws IOException { } public void testPreconfiguredFilter() throws IOException { - // Before 7.3 we don't adjust offsets - { - Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(); - Settings indexSettings = Settings.builder() - .put( - IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersions.V_7_3_0) - ) - ) - .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard") - 
-                .putList("index.analysis.analyzer.my_analyzer.filter", "word_delimiter_graph")
-                .build();
-            IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
-
-            try (
-                IndexAnalyzers indexAnalyzers = new AnalysisModule(
-                    TestEnvironment.newEnvironment(settings),
-                    Collections.singletonList(new CommonAnalysisPlugin()),
-                    new StablePluginsRegistry()
-                ).getAnalysisRegistry().build(IndexCreationContext.CREATE_INDEX, idxSettings)
-            ) {
-
-                NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer");
-                assertNotNull(analyzer);
-                assertAnalyzesTo(analyzer, "h100", new String[] { "h", "100" }, new int[] { 0, 0 }, new int[] { 4, 4 });
-
-            }
-        }
-
-        // After 7.3 we do adjust offsets
-        {
-            Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
-            Settings indexSettings = Settings.builder()
-                .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
-                .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard")
-                .putList("index.analysis.analyzer.my_analyzer.filter", "word_delimiter_graph")
-                .build();
-            IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
+        Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build();
+        Settings indexSettings = Settings.builder()
+            .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
+            .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard")
+            .putList("index.analysis.analyzer.my_analyzer.filter", "word_delimiter_graph")
+            .build();
+        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
 
-            try (
-                IndexAnalyzers indexAnalyzers = new AnalysisModule(
-                    TestEnvironment.newEnvironment(settings),
-                    Collections.singletonList(new CommonAnalysisPlugin()),
-                    new StablePluginsRegistry()
-                ).getAnalysisRegistry().build(IndexCreationContext.CREATE_INDEX, idxSettings)
-            ) {
+        try (
+            IndexAnalyzers indexAnalyzers = new AnalysisModule(
+                TestEnvironment.newEnvironment(settings),
+                Collections.singletonList(new CommonAnalysisPlugin()),
+                new StablePluginsRegistry()
+            ).getAnalysisRegistry().build(IndexCreationContext.CREATE_INDEX, idxSettings)
+        ) {
 
-                NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer");
-                assertNotNull(analyzer);
-                assertAnalyzesTo(analyzer, "h100", new String[] { "h", "100" }, new int[] { 0, 1 }, new int[] { 1, 4 });
+            NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer");
+            assertNotNull(analyzer);
+            assertAnalyzesTo(analyzer, "h100", new String[] { "h", "100" }, new int[] { 0, 1 }, new int[] { 1, 4 });
 
-            }
         }
     }
 }
diff --git a/modules/health-shards-availability/build.gradle b/modules/health-shards-availability/build.gradle
index 6c7cf5a19c8ac..b98824d84af94 100644
--- a/modules/health-shards-availability/build.gradle
+++ b/modules/health-shards-availability/build.gradle
@@ -19,7 +19,3 @@ restResources {
     include '_common', 'indices', 'index', 'cluster', 'nodes', 'get', 'ingest'
   }
 }
-
-tasks.named("yamlRestTestV7CompatTransform").configure {task ->
-  task.addAllowedWarningRegex("setting \\[ecs\\] is deprecated as ECS format is the default and only option")
-}
diff --git a/modules/ingest-attachment/build.gradle b/modules/ingest-attachment/build.gradle
index 89f0b530713c6..f708448c10d7a 100644
--- a/modules/ingest-attachment/build.gradle
+++ b/modules/ingest-attachment/build.gradle
@@ -138,14 +138,6 @@ tasks.named("forbiddenPatterns").configure {
   exclude '**/text-cjk-*.txt'
 }
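Returning to the word_delimiter_graph test collapsed above: only the current behaviour is kept, in which each sub-token gets its own adjusted offsets, so "h100" splits into "h" at [0,1) and "100" at [1,4); the removed pre-7.3 branch asserted the older behaviour where both parts inherited the whole token's [0,4). A standalone sketch of the split-and-adjust rule, using a toy letter/digit splitter rather than Lucene's WordDelimiterGraphFilter:

import java.util.ArrayList;
import java.util.List;

// Toy splitter illustrating the offsets asserted in the test above: break a
// token on letter<->digit transitions and give each part offsets that point at
// its own slice of the original token. Not the Lucene filter itself.
public class OffsetSketch {

    record Part(String text, int startOffset, int endOffset) {}

    static List<Part> split(String token) {
        List<Part> parts = new ArrayList<>();
        int start = 0;
        for (int i = 1; i <= token.length(); i++) {
            boolean boundary = i == token.length()
                || Character.isDigit(token.charAt(i)) != Character.isDigit(token.charAt(i - 1));
            if (boundary) {
                // Adjusted offsets: (start, i) covers just this part. The pre-7.3
                // behaviour would have reported (0, token.length()) for every part.
                parts.add(new Part(token.substring(start, i), start, i));
                start = i;
            }
        }
        return parts;
    }

    public static void main(String[] args) {
        // Matches the surviving assertion: "h" -> [0,1), "100" -> [1,4)
        System.out.println(split("h100"));
    }
}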
-tasks.named("yamlRestTestV7CompatTransform").configure { task -> - // 2 new tika metadata fields are returned in v8 - task.replaceValueInLength("_source.attachment", 8, "Test ingest attachment processor with .doc file") - task.replaceValueInLength("_source.attachment", 8, "Test ingest attachment processor with .docx file") - // Tika 2.4.0 adds an extra newline for each embedded attachment, making the content_length larger - task.replaceValueInMatch("_source.attachment.content_length", 20, "Test ingest attachment processor with .docx file") -} - tasks.named("thirdPartyAudit").configure { ignoreMissingClasses() } @@ -153,5 +145,5 @@ tasks.named("thirdPartyAudit").configure { if (BuildParams.inFipsJvm) { tasks.named("test").configure { enabled = false } tasks.named("yamlRestTest").configure { enabled = false }; - tasks.named("yamlRestTestV7CompatTest").configure { enabled = false }; + tasks.named("yamlRestCompatTest").configure { enabled = false }; } diff --git a/modules/ingest-common/build.gradle b/modules/ingest-common/build.gradle index d7100745680ba..ee923132aa6a6 100644 --- a/modules/ingest-common/build.gradle +++ b/modules/ingest-common/build.gradle @@ -49,7 +49,3 @@ tasks.named("thirdPartyAudit").configure { 'org.apache.commons.logging.LogFactory', ) } - -tasks.named("yamlRestTestV7CompatTransform").configure { task -> - task.addAllowedWarningRegex("\\[types removal\\].*") -} diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle index bc5bb165cd0d2..64a679581f76d 100644 --- a/modules/ingest-geoip/build.gradle +++ b/modules/ingest-geoip/build.gradle @@ -84,11 +84,6 @@ tasks.named("dependencyLicenses").configure { ignoreFile 'elastic-geoip-database-service-agreement-LICENSE.txt' } -tasks.named("yamlRestTestV7CompatTransform").configure { task -> - task.skipTestsByFilePattern("**/ingest_geoip/20_geoip_processor.yml", "from 8.0 yaml rest tests use geoip test fixture and default geoip are no longer packaged. 
In 7.x yaml tests used default databases which makes tests results very different, so skipping these tests") - // task.skipTest("lang_mustache/50_multi_search_template/Multi-search template with errors", "xxx") -} - artifacts { restTests(new File(projectDir, "src/yamlRestTest/resources/rest-api-spec/test")) } diff --git a/modules/ingest-geoip/qa/full-cluster-restart/build.gradle b/modules/ingest-geoip/qa/full-cluster-restart/build.gradle index a97664923438b..e930b4ca38233 100644 --- a/modules/ingest-geoip/qa/full-cluster-restart/build.gradle +++ b/modules/ingest-geoip/qa/full-cluster-restart/build.gradle @@ -20,12 +20,10 @@ dependencies { javaRestTestImplementation(testArtifact(project(":qa:full-cluster-restart"), "javaRestTest")) } -assert Version.fromString(VersionProperties.getVersions().get("elasticsearch")).getMajor() == 8 : - "If we are targeting a branch other than 8, we should enable migration tests" // once we are ready to test migrations from 8.x to 9.x, we can set the compatible version to 8.0.0 // see https://github.com/elastic/elasticsearch/pull/93666 -BuildParams.bwcVersions.withWireCompatible(v -> v.before("7.0.0")) { bwcVersion, baseName -> +BuildParams.bwcVersions.withWireCompatible(v -> v.before("9.0.0")) { bwcVersion, baseName -> tasks.register(bwcTaskName(bwcVersion), StandaloneRestIntegTestTask) { usesBwcDistribution(bwcVersion) systemProperty("tests.old_cluster_version", bwcVersion) diff --git a/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java b/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java index 4f8abf4b82390..b4d1788688119 100644 --- a/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java +++ b/modules/ingest-geoip/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/ingest/geoip/FullClusterRestartIT.java @@ -12,7 +12,9 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import org.apache.http.util.EntityUtils; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.client.Request; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; @@ -31,6 +33,8 @@ import static org.hamcrest.Matchers.contains; +@UpdateForV9 +@LuceneTestCase.AwaitsFix(bugUrl = "we need to figure out the index migrations here for 9.0") public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCase { private static final boolean useFixture = Boolean.getBoolean("geoip_use_service") == false; diff --git a/modules/ingest-user-agent/build.gradle b/modules/ingest-user-agent/build.gradle index 64cd38c584820..d124770e33cce 100644 --- a/modules/ingest-user-agent/build.gradle +++ b/modules/ingest-user-agent/build.gradle @@ -18,7 +18,3 @@ restResources { include '_common', 'indices', 'index', 'cluster', 'nodes', 'get', 'ingest' } } - -tasks.named("yamlRestTestV7CompatTransform").configure {task -> - task.addAllowedWarningRegex("setting \\[ecs\\] is deprecated as ECS format is the default and only option") -} diff --git a/modules/lang-mustache/build.gradle b/modules/lang-mustache/build.gradle index 7059165af2d9f..3cbcabed20a98 100644 --- a/modules/lang-mustache/build.gradle +++ b/modules/lang-mustache/build.gradle @@ -26,8 +26,3 @@ restResources { } } 
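One note on the full-cluster-restart build change above: withWireCompatible runs its callback once per wire-compatible version that matches the supplied predicate, so widening the bound from before("7.0.0") to before("9.0.0") is what actually re-enables the restart tests for the 8.x-to-9.x path. A compact sketch of that selection loop, with a stripped-down Version stand-in for org.elasticsearch.gradle.Version and a hypothetical task-naming callback:

import java.util.List;
import java.util.function.BiConsumer;
import java.util.function.Predicate;

// Sketch of the withWireCompatible(...) gating used in the build script above:
// invoke a registration callback for every wire-compatible version accepted by
// the predicate. Version is a minimal stand-in, not the real Gradle type.
public class BwcGatingSketch {

    record Version(int major, int minor, int revision) implements Comparable<Version> {
        static Version fromString(String s) {
            String[] p = s.split("\\.");
            return new Version(Integer.parseInt(p[0]), Integer.parseInt(p[1]), Integer.parseInt(p[2]));
        }

        boolean before(String other) {
            return compareTo(fromString(other)) < 0;
        }

        @Override
        public int compareTo(Version o) {
            int c = Integer.compare(major, o.major);
            if (c == 0) {
                c = Integer.compare(minor, o.minor);
            }
            return c == 0 ? Integer.compare(revision, o.revision) : c;
        }
    }

    static void withWireCompatible(List<Version> wireCompatible,
                                   Predicate<Version> filter,
                                   BiConsumer<Version, String> register) {
        for (Version v : wireCompatible) {
            if (filter.test(v)) {
                register.accept(v, "v" + v.major + "." + v.minor + "." + v.revision);
            }
        }
    }

    public static void main(String[] args) {
        List<Version> wire = List.of(Version.fromString("7.17.0"), Version.fromString("8.0.0"));
        // Mirrors the diff: everything before 9.0.0 now gets a restart-test task;
        // with the old before("7.0.0") bound this loop would register nothing.
        withWireCompatible(wire, v -> v.before("9.0.0"),
            (v, baseName) -> System.out.println("registering " + baseName + "#bwcTest"));
    }
}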
-tasks.named("yamlRestTestV7CompatTransform").configure {task -> - task.addAllowedWarningRegex("\\[types removal\\].*") - task.replaceValueInMatch("responses.1.error.root_cause.0.type", "x_content_e_o_f_exception", "Multi-search template with errors") - task.replaceValueInMatch("responses.1.error.root_cause.0.reason", "/\\[1:22\\].Unexpected.end.of.file/", "Multi-search template with errors") -} diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle index cc557ac2289f6..0b2882934a122 100644 --- a/modules/lang-painless/build.gradle +++ b/modules/lang-painless/build.gradle @@ -61,48 +61,6 @@ tasks.named("test").configure { jvmArgs '-XX:-OmitStackTraceInFastThrow', '-XX:-HeapDumpOnOutOfMemoryError' } -tasks.named("yamlRestTestV7CompatTest").configure { - systemProperty 'tests.rest.blacklist', [ - 'painless/20_scriptfield/Scripted Field Doing Compare (fields api)', - 'painless/70_execute_painless_scripts/Execute with double field context (single-value, fields api)', - 'painless/70_execute_painless_scripts/Execute with double field context (single-value, fields api)', - '70_execute_painless_scripts/Execute with geo point field context (multi-value, fields api)', - '70_execute_painless_scripts/Execute with ip field context (single-value, fields api)', - '70_execute_painless_scripts/Execute with boolean field context (single-value, fields api)', - 'painless/70_execute_painless_scripts/Execute with boolean field context (multi-value, fields api)', - 'painless/40_fields_api/date to long', - 'painless/130_metric_agg/Scripted Metric Agg Total (fields api)', - 'painless/70_execute_painless_scripts/Execute with keyword field context (multi-value, fields api)', - 'painless/100_terms_agg/Double Value Script with doc notation (fields api)', - 'painless/100_terms_agg/Long Value Script with doc notation (fields api)', - 'painless/20_scriptfield/Access a date (fields api)', - 'painless/70_execute_painless_scripts/Execute with date field context (multi-value, fields api)', - 'painless/70_execute_painless_scripts/Execute with keyword field context (single-value, fields api)', - 'painless/70_execute_painless_scripts/Execute with long field context (multi-value, fields api)', - 'painless/20_scriptfield/Scripted Field (fields api)', - 'painless/70_execute_painless_scripts/Execute with long field context (single-value, fields api)', - 'painless/70_execute_painless_scripts/Execute with geo point field context (single-value, fields api)', - 'painless/70_execute_painless_scripts/Execute with date field context (single-value, fields api)', - 'painless/40_fields_api/missing field', - 'painless/40_fields_api/sort script fields api', - 'painless/20_scriptfield/Access many dates (fields api)', - 'painless/70_execute_painless_scripts/Execute with long field context (single-value, fields api)', - 'painless/70_execute_painless_scripts/Execute with geo point field context (single-value, fields api)', - 'painless/70_execute_painless_scripts/Execute with date field context (single-value, fields api)', - 'painless/40_fields_api/missing field', - 'painless/40_fields_api/sort script fields api', - 'painless/20_scriptfield/Access many dates (fields api)', - 'painless/100_terms_agg/String Value Script with doc notation (fields api)', - 'painless/40_fields_api/string to long and bigint', - 'painless/40_fields_api/boolean to long and bigint', - 'painless/40_fields_api/script fields api for dates', - 'painless/70_execute_painless_scripts/Execute with double field context (multi-value, fields api)', - 
'painless/40_fields_api/filter script fields api', - 'painless/40_fields_api/script score fields api', - 'painless/70_mov_fn_agg/*' // Agg moved to a module. - ].join(',') -} - esplugin.bundleSpec.into("spi") { from(configurations.spi) } diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java index 4fa1d7b7a3108..0e9d7ca5f15c8 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.geo.GeometryNormalizer; import org.elasticsearch.common.geo.GeometryParser; import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.GeometryCollection; import org.elasticsearch.geometry.Line; @@ -342,6 +343,8 @@ public void testParsePolygon() throws IOException, ParseException { assertGeometryEquals(p, polygonGeoJson, false); } + @UpdateForV9 + @AwaitsFix(bugUrl = "this test is using pre 8.0.0 index versions so needs to be removed or updated") public void testParse3DPolygon() throws IOException, ParseException { XContentBuilder polygonGeoJson = XContentFactory.jsonBuilder() .startObject() diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoWKTShapeParserTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoWKTShapeParserTests.java index 6e8a61277cccf..74340e705b578 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoWKTShapeParserTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoWKTShapeParserTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeometryNormalizer; import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.Line; import org.elasticsearch.geometry.MultiLine; @@ -301,6 +302,8 @@ public void testParseMixedDimensionPolyWithHole() throws IOException, ParseExcep assertThat(e, hasToString(containsString("coordinate dimensions do not match"))); } + @UpdateForV9 + @AwaitsFix(bugUrl = "this test is using pre 8.0.0 index versions so needs to be removed or updated") public void testParseMixedDimensionPolyWithHoleStoredZ() throws IOException { List shellCoordinates = new ArrayList<>(); shellCoordinates.add(new Coordinate(100, 0)); @@ -334,6 +337,8 @@ public void testParseMixedDimensionPolyWithHoleStoredZ() throws IOException { assertThat(e, hasToString(containsString("unable to add coordinate to CoordinateBuilder: coordinate dimensions do not match"))); } + @UpdateForV9 + @AwaitsFix(bugUrl = "this test is using pre 8.0.0 index versions so needs to be removed or updated") public void testParsePolyWithStoredZ() throws IOException { List shellCoordinates = new ArrayList<>(); shellCoordinates.add(new Coordinate(100, 0, 0)); @@ -357,6 +362,8 @@ public void testParsePolyWithStoredZ() throws IOException { assertEquals(shapeBuilder.numDimensions(), 3); } + @UpdateForV9 + @AwaitsFix(bugUrl = "this test is using pre 8.0.0 index versions so needs to be removed or updated") public void testParseOpenPolygon() throws IOException { String openPolygon = "POLYGON ((100 5, 100 10, 90 10, 90 5))"; diff --git 
a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java index 0a0bb12bedbae..407f372bee26a 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java @@ -12,6 +12,7 @@ import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; +import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoUtils; @@ -19,6 +20,7 @@ import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.geometry.Point; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -53,6 +55,8 @@ import static org.mockito.Mockito.when; @SuppressWarnings("deprecation") +@UpdateForV9 +@AwaitsFix(bugUrl = "this is testing legacy functionality so can likely be removed in 9.0") public class LegacyGeoShapeFieldMapperTests extends MapperTestCase { @Override diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java index dc74b9cd295ce..a64352c5306e1 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java @@ -7,7 +7,9 @@ */ package org.elasticsearch.legacygeo.mapper; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.common.geo.SpatialStrategy; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.FieldTypeTestCase; @@ -20,6 +22,8 @@ import java.util.List; import java.util.Map; +@UpdateForV9 +@LuceneTestCase.AwaitsFix(bugUrl = "this is testing legacy functionality so can likely be removed in 9.0") public class LegacyGeoShapeFieldTypeTests extends FieldTypeTestCase { /** diff --git a/modules/parent-join/build.gradle b/modules/parent-join/build.gradle index 844478c83e7c7..3a1d8a396c4be 100644 --- a/modules/parent-join/build.gradle +++ b/modules/parent-join/build.gradle @@ -19,7 +19,3 @@ restResources { include '_common', 'bulk', 'cluster', 'get', 'nodes', 'indices', 'index', 'search' } } - -tasks.named("yamlRestTestV7CompatTransform").configure { task -> - task.skipTest("/30_inner_hits/profile fetch", "profile output has changed") -} diff --git a/modules/percolator/build.gradle b/modules/percolator/build.gradle index b9b257a42e051..041fbb8bce340 100644 --- a/modules/percolator/build.gradle +++ b/modules/percolator/build.gradle @@ -23,7 +23,3 @@ restResources { include '_common', 'get', 'indices', 'index', 'search', 'msearch' } } - -tasks.named("yamlRestTestV7CompatTransform").configure{ task -> - task.addAllowedWarningRegex("\\[types removal\\].*") -} diff --git a/modules/reindex/build.gradle 
b/modules/reindex/build.gradle index 9cd7963224cf8..9e1e1e842ba58 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -160,30 +160,3 @@ if (OS.current() == OS.WINDOWS) { } } } - -tasks.named("yamlRestTestV7CompatTransform").configure { task -> - task.skipTest("reindex/20_validation/reindex without source gives useful error message", "exception with a type. Not much benefit adding _doc there.") - task.skipTest("update_by_query/20_validation/update_by_query without source gives useful error message", "exception with a type. Not much benefit adding _doc there.") - - // these tests are all relying on a call to refresh all indices, when they could easily be changed - // in 7.x to call the specific index they want to refresh. - // See https://github.com/elastic/elasticsearch/issues/81188 - task.skipTest("delete_by_query/70_throttle/Rethrottle to -1 which turns off throttling", "test relies on system index being non-hidden") - task.skipTest("delete_by_query/80_slices/Multiple slices with rethrottle", "test relies on system index being non-hidden") - task.skipTest("delete_by_query/80_slices/Multiple slices with wait_for_completion=false", "test relies on system index being non-hidden") - task.skipTest("reindex/80_slices/Multiple slices with rethrottle", "test relies on system index being non-hidden") - task.skipTest("reindex/80_slices/Multiple slices with wait_for_completion=false", "test relies on system index being non-hidden") - task.skipTest("update_by_query/70_slices/Multiple slices with rethrottle", "test relies on system index being non-hidden") - task.skipTest("update_by_query/70_slices/Multiple slices with wait_for_completion=false", "test relies on system index being non-hidden") - - task.addAllowedWarningRegex("\\[types removal\\].*") -} - -tasks.named("yamlRestTestV7CompatTest").configure { - systemProperty 'tests.rest.blacklist', [ - 'update_by_query/80_scripting/Can\'t change _id', - 'update_by_query/80_scripting/Set unsupported operation type', - 'update_by_query/80_scripting/Setting bogus context is an error', - - ].join(',') -} diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java index ebe4b1835b103..f02695c63a7e7 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java @@ -264,12 +264,12 @@ public void testMultipleFeatureMigration() throws Exception { .setAliasName(".second-internal-managed-alias") .setPrimaryIndex(".second-int-man-old") .setType(SystemIndexDescriptor.Type.INTERNAL_MANAGED) - .setSettings(createSettings(IndexVersions.V_7_0_0, 0)) + .setSettings(createSettings(IndexVersions.MINIMUM_COMPATIBLE, 0)) .setMappings(createMapping(true, true)) .setOrigin(ORIGIN) .setVersionMetaKey(VERSION_META_KEY) .setAllowedElasticProductOrigins(Collections.emptyList()) - .setMinimumNodeVersion(Version.V_7_0_0) + .setMinimumNodeVersion(Version.CURRENT.minimumCompatibilityVersion()) .setPriorSystemIndexDescriptors(Collections.emptyList()) .build(); diff --git a/modules/repository-url/build.gradle b/modules/repository-url/build.gradle index 3fe2f9d9bae42..3537d430e212b 100644 --- a/modules/repository-url/build.gradle +++ b/modules/repository-url/build.gradle @@ -33,11 +33,6 @@ dependencies { internalClusterTestImplementation 
project(':test:fixtures:url-fixture') } -tasks.named("yamlRestTestV7CompatTransform").configure { task -> - task.skipTest("repository_url/10_basic/Restore with repository-url using file://", "Error message has changed") - task.skipTest("repository_url/10_basic/Restore with repository-url using http://", "Error message has changed") -} - tasks.named("thirdPartyAudit").configure { ignoreMissingClasses( 'javax.servlet.ServletContextEvent', diff --git a/modules/runtime-fields-common/build.gradle b/modules/runtime-fields-common/build.gradle index 5a2d268cf7a4e..f9485b6ed3027 100644 --- a/modules/runtime-fields-common/build.gradle +++ b/modules/runtime-fields-common/build.gradle @@ -21,10 +21,3 @@ dependencies { api project(':libs:elasticsearch-grok') api project(':libs:elasticsearch-dissect') } - -tasks.named("yamlRestTestV7CompatTransform").configure { task -> - task.skipTest("runtime_fields/100_geo_point/fetch fields from source", "Format changed. Old format was a bug.") - task.skipTest("runtime_fields/101_geo_point_from_source/fetch fields from source", "Format changed. Old format was a bug.") - task.skipTest("runtime_fields/102_geo_point_source_in_query/fetch fields from source", "Format changed. Old format was a bug.") - task.skipTest("runtime_fields/103_geo_point_calculated_at_index/fetch fields from source", "Format changed. Old format was a bug.") -} diff --git a/plugins/analysis-icu/build.gradle b/plugins/analysis-icu/build.gradle index 1c7db6d040be5..eed88b3232a45 100644 --- a/plugins/analysis-icu/build.gradle +++ b/plugins/analysis-icu/build.gradle @@ -43,7 +43,3 @@ tasks.named("dependencyLicenses").configure { mapping from: /lucene-.*/, to: 'lucene' } -tasks.named("yamlRestTestV7CompatTransform").configure { task -> - task.skipTest("analysis_icu/10_basic/Normalization with deprecated unicodeSetFilter", "Cleanup versioned deprecations in analysis #41560") -} - diff --git a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/plugin/analysis/phonetic/AnalysisPhoneticFactoryTests.java b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/plugin/analysis/phonetic/AnalysisPhoneticFactoryTests.java index 348e9f5fae7c8..c83d8b789611f 100644 --- a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/plugin/analysis/phonetic/AnalysisPhoneticFactoryTests.java +++ b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/plugin/analysis/phonetic/AnalysisPhoneticFactoryTests.java @@ -43,7 +43,7 @@ public void testDisallowedWithSynonyms() throws IOException { Settings settings = Settings.builder() .put( IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersion.current()) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()) ) .put("path.home", createTempDir().toString()) .build(); diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java index c57112d0455c6..120b6bdf3288c 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedFullClusterRestartTestCase.java @@ -36,7 +36,8 @@ @TestCaseOrdering(FullClusterRestartTestOrdering.class) public abstract class ParameterizedFullClusterRestartTestCase extends 
ESRestTestCase { - private static final Version MINIMUM_WIRE_COMPATIBLE_VERSION = Version.fromString("7.17.0"); + + private static final Version MINIMUM_WIRE_COMPATIBLE_VERSION = Version.fromString(System.getProperty("tests.minimum.wire.compatible")); private static final String OLD_CLUSTER_VERSION = System.getProperty("tests.old_cluster_version"); private static IndexVersion oldIndexVersion; private static boolean upgradeFailed = false; diff --git a/qa/rolling-upgrade-legacy/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java b/qa/rolling-upgrade-legacy/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java index fe2236adc4904..b0025302701af 100644 --- a/qa/rolling-upgrade-legacy/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java +++ b/qa/rolling-upgrade-legacy/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java @@ -419,7 +419,7 @@ public void testRecoveryClosedIndex() throws Exception { } final IndexVersion indexVersionCreated = indexVersionCreated(indexName); - if (indexVersionCreated.onOrAfter(IndexVersions.V_7_2_0)) { + if (indexVersionCreated.onOrAfter(IndexVersions.V_8_0_0)) { // index was created on a version that supports the replication of closed indices, // so we expect the index to be closed and replicated ensureGreen(indexName); @@ -448,7 +448,7 @@ public void testCloseIndexDuringRollingUpgrade() throws Exception { closeIndex(indexName); } - if (minimumIndexVersion().onOrAfter(IndexVersions.V_7_2_0)) { + if (minimumIndexVersion().onOrAfter(IndexVersions.V_8_0_0)) { // index is created on a version that supports the replication of closed indices, // so we expect the index to be closed and replicated ensureGreen(indexName); @@ -483,9 +483,9 @@ public void testClosedIndexNoopRecovery() throws Exception { closeIndex(indexName); } - if (indexVersionCreated(indexName).onOrAfter(IndexVersions.V_7_2_0)) { + if (indexVersionCreated(indexName).onOrAfter(IndexVersions.V_8_0_0)) { // index was created on a version that supports the replication of closed indices, so we expect it to be closed and replicated - assertTrue(minimumIndexVersion().onOrAfter(IndexVersions.V_7_2_0)); + assertTrue(minimumIndexVersion().onOrAfter(IndexVersions.V_8_0_0)); ensureGreen(indexName); assertClosedIndex(indexName, true); if (CLUSTER_TYPE != ClusterType.OLD) { diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 089b7470e9a97..015c9c4b812c6 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -41,194 +41,6 @@ dependencies { clusterModules project(':modules:data-streams') } -tasks.named("yamlRestTestV7CompatTransform").configure { task -> - - task.skipTestsByFilePattern("**/cat*/*.yml", "Cat API are meant to be consumed by humans, so will not be supported by Compatible REST API") - task.skipTestsByFilePattern("**/indices.upgrade/*.yml", "upgrade api will only get a dummy endpoint returning an exception suggesting to use _reindex") - task.skipTestsByFilePattern("**/indices.stats/60_field_usage/*/*.yml", "field usage results will be different between lucene versions") - task.skipTestsByFilePattern("**/search.aggregation/*.yml", "run by the aggregation module") - - task.skipTest("bulk/11_dynamic_templates/Dynamic templates", "Error message has changed") - task.skipTest("index/80_date_nanos/date_nanos requires dates after 1970 and before 2262", "Error message has changed") - task.skipTest("indices.create/20_mix_typeless_typeful/Implicitly create a typed index while there is a typeless template", "Type information about the type is removed and not 
passed down. The logic to check for this is also removed.") - task.skipTest("indices.create/20_mix_typeless_typeful/Implicitly create a typeless index while there is a typed template", "Type information about the type is removed and not passed down. The logic to check for this is also removed.") - task.skipTest("delete/70_mix_typeless_typeful/DELETE with typeless API on an index that has types", "Type information about the type is removed and not passed down. The logic to check for this is also removed."); - task.skipTest("get/100_mix_typeless_typeful/GET with typeless API on an index that has types", "Failing due to not recognising missing type (the type path param is ignored, will no be fixed"); - task.skipTest("indices.get_field_mapping/21_missing_field_with_types/Return empty object if field doesn't exist, but type and index do", "This test returns test_index.mappings:{} when {} was expected. difference between 20_missing_field and 21_missing_field_with_types?") - task.skipTest("indices.get_field_mapping/30_missing_type/Raise 404 when type doesn't exist", "The information about the type is not present in the index. hence it cannot know if the type exist or not.") - task.skipTest("indices.get_mapping/20_missing_type/Existent and non-existent type returns 404 and the existing type", " The information about the type is not present in the index. hence it cannot know if the type exist or not") - task.skipTest("indices.get_mapping/20_missing_type/Existent and non-existent types returns 404 and the existing type", "The information about the type is not present in the index. hence it cannot know if the type exist or not.") - task.skipTest("indices.get_mapping/20_missing_type/No type matching pattern returns 404", "The information about the type is not present in the index. hence it cannot know if the type exist or not.") - task.skipTest("indices.get_mapping/20_missing_type/Non-existent type returns 404", "The information about the type is not present in the index. hence it cannot know if the type exist or not.") - task.skipTest("indices.get_mapping/20_missing_type/Type missing when no types exist", "The information about the type is not present in the index. hence it cannot know if the type exist or not.") - task.skipTest("indices.put_mapping/20_mix_typeless_typeful/PUT mapping with _doc on an index that has types", "The information about the type is not present in the index. hence it cannot know if the type was already used or not") - task.skipTest("indices.put_mapping/20_mix_typeless_typeful/PUT mapping with typeless API on an index that has types", "The information about the type is not present in the index. hence it cannot know if the type was already used or not") - task.skipTest("search/160_exists_query/Test exists query on _type field", "There is a small distinction between empty mappings and no mappings at all. The code to implement this test was refactored #54003; field search on _type field- not implementing. 
The data for _type is considered incorrect in this search")
-  task.skipTest("termvectors/50_mix_typeless_typeful/Term vectors with typeless API on an index that has types", "type information is not stored, hence the the index will be found")
-  task.skipTest("mget/11_default_index_type/Default index/type", "mget - these use cases are no longer valid because we always default to _doc.; This mean test cases where there is assertion on not finding by type won't work")
-  task.skipTest("mget/16_basic_with_types/Basic multi-get", "mget - these use cases are no longer valid, because we always default to _doc.; This mean test cases where there is assertion on not finding by type won't work")
-  task.skipTest("explain/40_mix_typeless_typeful/Explain with typeless API on an index that has types", "asserting about type not found won't work as we ignore the type information")
-  task.skipTest("indices.stats/20_translog/Translog retention settings are deprecated", "translog settings removal is not supported under compatible api")
-  task.skipTest("indices.stats/20_translog/Translog retention without soft_deletes", "translog settings removal is not supported under compatible api")
-  task.skipTest("indices.stats/20_translog/Translog stats on closed indices without soft-deletes", "translog settings removal is not supported under compatible api")
-  task.skipTest("indices.create/10_basic/Create index without soft deletes", "Make soft-deletes mandatory in 8.0 #51122 - settings changes are note supported in Rest Api compatibility")
-  task.skipTest("field_caps/30_filter/Field caps with index filter", "behaviour change after #63692 4digits dates are parsed as epoch and in quotes as year")
-  task.skipTest("indices.forcemerge/10_basic/Check deprecation warning when incompatible only_expunge_deletes and max_num_segments values are both set", "#44761 bug fix")
-  task.skipTest("search/340_type_query/type query", "#47207 type query throws exception in compatible mode")
-  task.skipTest("search/310_match_bool_prefix/multi_match multiple fields with cutoff_frequency throws exception", "#42654 cutoff_frequency, common terms are not supported. Throwing an exception")
-  task.skipTest("search_shards/10_basic/Search shards aliases with and without filters", "Filter representation no longer outputs default boosts")
-  task.skipTest("migration/10_get_feature_upgrade_status/Get feature upgrade status", "Awaits backport")
-  task.skipTest("search/330_fetch_fields/Test disable source", "Error no longer thrown")
-  task.skipTest("search/370_profile/fetch fields", "profile output has changed")
-  task.skipTest("search/370_profile/fetch source", "profile output has changed")
-  task.skipTest("search/370_profile/fetch nested source", "profile output has changed")
-  task.skipTest("search/240_date_nanos/doc value fields are working as expected across date and date_nanos fields", "Fetching docvalues field multiple times is no longer allowed")
-  task.skipTest("search/110_field_collapsing/field collapsing and rescore", "#107779 Field collapsing is compatible with rescore in 8.15")
-
-  task.replaceValueInMatch("_type", "_doc")
-  task.addAllowedWarningRegex("\\[types removal\\].*")
-  task.replaceValueInMatch("nodes.\$node_id.roles.8", "ml", "node_info role test")
-  task.replaceValueInMatch("nodes.\$node_id.roles.9", "remote_cluster_client", "node_info role test")
-  task.removeMatch("nodes.\$node_id.roles.10", "node_info role test")
-  task.replaceIsTrue("test_index.mappings.type_1", "test_index.mappings._doc")
-  //override for indices.get and indices.create
-  //task.replaceIsFalse("test_index.mappings.type_1", "test_index.mappings._doc")
-  //overrides for indices.create/20_mix_typeless_typeful
-  task.replaceIsFalse("test-1.mappings._doc","false", "Create a typed index while there is a typeless template")
-  task.replaceIsFalse("test-1.mappings._doc","false", "Create a typeless index while there is a typed template")
-
-  task.replaceIsTrue("test-1.mappings.my_type", "test-1.mappings._doc")
-  task.replaceIsTrue("test-1.mappings.my_type.properties.foo", "test-1.mappings._doc.properties.foo")
-  task.replaceIsTrue("test-1.mappings.my_type.properties.bar", "test-1.mappings._doc.properties.bar")
-
-  // overrides for indices.get_field_mapping
-  task.replaceKeyInLength("test_index.mappings.test_type.text.mapping.text.type",
-    "test_index.mappings._doc.text.mapping.text.type"
-  )
-  task.replaceKeyInMatch("test_index.mappings.test_type.text.mapping.text.analyzer",
-    "test_index.mappings._doc.text.mapping.text.analyzer"
-  )
-  task.replaceKeyInMatch("test_index.mappings.test_type.t1.full_name",
-    "test_index.mappings._doc.t1.full_name"
-  )
-  task.replaceKeyInMatch("test_index.mappings.test_type.t2.full_name",
-    "test_index.mappings._doc.t2.full_name"
-  )
-  task.replaceKeyInMatch("test_index.mappings.test_type.obj\\.t1.full_name",
-    "test_index.mappings._doc.obj\\.t1.full_name"
-  )
-  task.replaceKeyInMatch("test_index.mappings.test_type.obj\\.i_t1.full_name",
-    "test_index.mappings._doc.obj\\.i_t1.full_name"
-  )
-  task.replaceKeyInMatch("test_index.mappings.test_type.obj\\.i_t3.full_name",
-    "test_index.mappings._doc.obj\\.i_t3.full_name"
-  )
-  task.replaceKeyInLength("test_index.mappings.test_type",
-    "test_index.mappings._doc"
-  )
-  task.replaceKeyInMatch("test_index_2.mappings.test_type_2.t1.full_name",
-    "test_index.mappings._doc.t1.full_name"
-  )
-  task.replaceKeyInMatch("test_index_2.mappings.test_type_2.t2.full_name",
-    "test_index.mappings._doc.t2.full_name"
-  )
-  task.replaceKeyInLength("test_index_2.mappings.test_type_2",
-    "test_index.mappings._doc"
-  )
-  task.replaceKeyInMatch("test_index.mappings.test_type.text.mapping.text.type",
-    "test_index.mappings._doc.text.mapping.text.type"
-  )
-  // overrides for indices.put_mapping/11_basic_with_types
-  task.replaceKeyInMatch("test_index.mappings.test_type.properties.text1.type",
-    "test_index.mappings._doc.properties.text1.type"
-  )
-  task.replaceKeyInMatch("test_index.mappings.test_type.properties.text1.analyzer",
-    "test_index.mappings._doc.properties.text1.analyzer"
-  )
-  task.replaceKeyInMatch("test_index.mappings.test_type.properties.text2.type",
-    "test_index.mappings._doc.properties.text2.type"
-  )
-  task.replaceKeyInMatch("test_index.mappings.test_type.properties.text2.analyzer",
-    "test_index.mappings._doc.properties.text2.analyzer"
-  )
-  task.replaceKeyInMatch("test_index.mappings.test_type.properties.subfield.properties.text3.type",
-    "test_index.mappings._doc.properties.subfield.properties.text3.type"
-  )
-  task.replaceKeyInMatch("test_index.mappings.test_type.properties.text1.fields.text_raw.type",
-    "test_index.mappings._doc.properties.text1.fields.text_raw.type"
-  )
-  // overrides for indices.put_mapping/all_path_options_with_types
-  task.replaceKeyInMatch("test_index1.mappings.test_type.properties.text.type",
-    "test_index1.mappings._doc.properties.text.type"
-  )
-  task.replaceKeyInMatch("test_index1.mappings.test_type.properties.text.analyzer",
-    "test_index1.mappings._doc.properties.text.analyzer"
-  )
-  task.replaceKeyInMatch("test_index2.mappings.test_type.properties.text.type",
-    "test_index2.mappings._doc.properties.text.type"
-  )
-  task.replaceKeyInMatch("test_index2.mappings.test_type.properties.text.analyzer",
-    "test_index2.mappings._doc.properties.text.analyzer"
-  )
-  task.replaceKeyInMatch("foo.mappings.test_type.properties.text.type",
-    "foo.mappings._doc.properties.text.type"
-  )
-  task.replaceKeyInMatch("foo.mappings.test_type.properties.text.analyzer",
-    "foo.mappings._doc.properties.text.analyzer"
-  )
-  // overrides for indices.get_mapping
-  task.replaceIsTrue("test_1.mappings.doc", "test_1.mappings._doc")
-  task.replaceIsTrue("test_2.mappings.doc", "test_2.mappings._doc")
-  // overrides for mget
-  task.replaceValueInMatch("docs.0._type", "_doc" , "Basic multi-get") // index found, but no doc
-  task.replaceValueInMatch("docs.0._type", "_doc", "Default index/type")
-  task.replaceValueInMatch("docs.0._type", "_doc", "Non-existent index")
-  task.replaceValueInMatch("docs.0._type", "_doc", "Missing metadata")
-  task.replaceValueInMatch("docs.0._type", "_doc", "Multi Get with alias that resolves to multiple indices")
-  task.replaceValueInMatch("docs.1._type", "_doc", "Multi Get with alias that resolves to multiple indices")
-  task.replaceValueInMatch("docs.2._type", "_doc", "Multi Get with alias that resolves to multiple indices")
-  task.replaceValueInMatch("docs.0._type", "_doc", "IDs")
-  task.replaceValueInMatch("docs.1._type", "_doc", "IDs")
-  task.replaceValueInMatch("docs.2._type", "_doc", "Routing")
-
-  //overrides for indices.stats
-  //TODO fix to remove the below match
-  task.replaceKeyInMatch("_all.primaries.indexing.types.baz.index_total",
-    "_all.primaries.indexing.types._doc.index_total"
-  )
-  task.replaceKeyInMatch("_all.primaries.indexing.types.bar.index_total",
-    "_all.primaries.indexing.types._doc.index_total"
-  )
-  task.replaceValueInMatch("_all.primaries.indexing.types._doc.index_total", 2)
-  // points get touched by sorting in ES 8
-  task.replaceValueInMatch("testindex.shards.0.stats.fields.price.points", 1)
-
-  //override for "indices.open/10_basic/?wait_for_active_shards default is deprecated" and "indices.open/10_basic/?wait_for_active_shards=index-setting"
-  task.addAllowedWarningRegexForTest("\\?wait_for_active_shards=index-setting is now the default behaviour.*", "?wait_for_active_shards=index-setting")
-  task.removeWarningForTest("the default value for the ?wait_for_active_shards parameter will change from '0' to 'index-setting' in version 8; " +
-    "specify '?wait_for_active_shards=index-setting' to adopt the future default behaviour, or '?wait_for_active_shards=0' to preserve today's behaviour"
-    , "?wait_for_active_shards default is deprecated")
-
-  // override for exception message change in #55291 tests cluster.voting_config_exclusions/10_basic/
-  // 'Throw exception when adding voting config exclusion and specifying both node_ids and node_names',
-  // 'Throw exception when adding voting config exclusion without specifying nodes',
-  task.replaceValueTextByKeyValue("catch",
-    '/Please set node identifiers correctly. One and only one of \\[node_name\\], \\[node_names\\] and \\[node_ids\\] has to be set/',
-    '/You must set \\[node_names\\] or \\[node_ids\\] but not both/')
-
-  // sync_id is no longer available in SegmentInfos.userData // "indices.flush/10_basic/Index synced flush rest test"
-  task.replaceIsTrue("indices.testing.shards.0.0.commit.user_data.sync_id", "indices.testing.shards.0.0.commit.user_data")
-
-  // we can now search using doc values only
-  task.replaceValueInMatch("fields.object\\.nested1.long.searchable", true)
-
-  //client.type no longer exists #101214
-  task.replaceKeyInMatch("nodes.\$node_id.settings.client.type", "nodes.\$node_id.settings.node.attr.testattr")
-  task.replaceValueInMatch("nodes.\$node_id.settings.node.attr.testattr", "test")
-  task.replaceKeyInMatch("nodes.\$node_id.settings.client\\.type", "nodes.\$node_id.settings.node\\.attr\\.testattr")
-  task.replaceValueInMatch("nodes.\$node_id.settings.node\\.attr\\.testattr", "test")
-}
-
 tasks.register('enforceYamlTestConvention').configure {
   def tree = fileTree('src/main/resources/rest-api-spec/test')
   doLast {
diff --git a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/api/cluster.post_voting_config_exclusions_with_node_name_part.json b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/api/cluster.post_voting_config_exclusions_with_node_name_part.json
deleted file mode 100644
index 2cdc2f3bc9aea..0000000000000
--- a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/api/cluster.post_voting_config_exclusions_with_node_name_part.json
+++ /dev/null
@@ -1,33 +0,0 @@
-{
-  "cluster.post_voting_config_exclusions_with_node_name_part":{
-    "documentation":{
-      "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/voting-config-exclusions.html",
-      "description":"Updates the cluster voting config exclusions by node_name (not node ids or node names)."
-    },
-    "stability":"stable",
-    "visibility":"public",
-    "headers":{
-      "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"]
-    },
-    "url":{
-      "paths":[
-        {
-          "path":"/_cluster/voting_config_exclusions/{node_name}",
-          "methods":[
-            "POST"
-          ],
-          "parts":{
-            "node_name":{
-              "type":"string",
-              "description":"A comma-separated list of node descriptors of the nodes to exclude from the voting configuration."
-            }
-          },
-          "deprecated":{
-            "version":"7.8.0",
-            "description":"node_name is deprecated, use node_names or node_ids instead"
-          }
-        }
-      ]
-    }
-  }
-}
diff --git a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/api/indices.put_template_with_param.json b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/api/indices.put_template_with_param.json
deleted file mode 100644
index 7ee6cbd39ebf3..0000000000000
--- a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/api/indices.put_template_with_param.json
+++ /dev/null
@@ -1,54 +0,0 @@
-{
-  "indices.put_template_with_param":{
-    "documentation":{
-      "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-templates.html",
-      "description":"Creates or updates an index template."
-    },
-    "stability":"stable",
-    "visibility":"public",
-    "headers":{
-      "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"],
-      "content_type": ["application/vnd.elasticsearch+json;compatible-with=7"]
-    },
-    "url":{
-      "paths":[
-        {
-          "path":"/_template/{name}",
-          "methods":[
-            "PUT",
-            "POST"
-          ],
-          "parts":{
-            "name":{
-              "type":"string",
-              "description":"The name of the template"
-            }
-          }
-        }
-      ]
-    },
-    "params":{
-      "template":{
-        "type":"string",
-        "description":"The indices that this template should apply to, replaced by index_patterns within the template definition."
-      },
-      "order":{
-        "type":"number",
-        "description":"The order for this template when merging multiple matching ones (higher numbers are merged later, overriding the lower numbers)"
-      },
-      "create":{
-        "type":"boolean",
-        "description":"Whether the index template should only be added if new or can also replace an existing one",
-        "default":false
-      },
-      "master_timeout":{
-        "type":"time",
-        "description":"Specify timeout for connection to master"
-      }
-    },
-    "body":{
-      "description":"The template definition",
-      "required":true
-    }
-  }
-}
diff --git a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/cluster.voting_config_exclusions/10_basic_compat.yml b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/cluster.voting_config_exclusions/10_basic_compat.yml
deleted file mode 100644
index 8806918703abe..0000000000000
--- a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/cluster.voting_config_exclusions/10_basic_compat.yml
+++ /dev/null
@@ -1,18 +0,0 @@
----
-setup:
-  - requires:
-      test_runner_features:
-        - "headers"
-        - "warnings_regex"
-
----
-"Throw exception when adding voting config exclusion by specifying a 'node_name'":
-  - do:
-      headers:
-        Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-        Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-      cluster.post_voting_config_exclusions_with_node_name_part:
-        node_name: someNodeName
-      warnings_regex:
-        - ".* /_cluster/voting_config_exclusions/\\{node_name\\} has been removed. .*"
-      catch: /\[node_name\] has been removed, you must set \[node_names\] or \[node_ids\]/
diff --git a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/indices.deprecated.upgrade/10_basic_upgrade.yml b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/indices.deprecated.upgrade/10_basic_upgrade.yml
deleted file mode 100644
index b368975fa5e5b..0000000000000
--- a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/indices.deprecated.upgrade/10_basic_upgrade.yml
+++ /dev/null
@@ -1,42 +0,0 @@
----
-setup:
-  - requires:
-      test_runner_features:
-        - "headers"
-        - "allowed_warnings_regex"
-
----
-Basic test for upgrade indices:
-  - requires:
-      cluster_features: ["gte_v7.11.0"]
-      reason: "_upgrade api is deprecated since 7.11.0"
-      test_runner_features:
-        - "warnings"
-  - do:
-      indices.create:
-        index: "test_index"
-        body:
-          settings:
-            index:
-              number_of_replicas: 0
-      headers:
-        Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-        Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-      allowed_warnings_regex:
-        - "\\[types removal\\].*"
-  - do:
-      catch: "bad_request"
-      indices.upgrade:
-        index: "test_index"
-      warnings:
-        - "The _upgrade API is no longer useful and will be removed. Instead, see _reindex\
-          \ API."
-      headers:
-        Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-        Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-      allowed_warnings_regex:
-        - "\\[types removal\\].*"
-  - match:
-      status: 400
-  - match:
-      error.reason: "/Upgrade.action.(GET|POST).(_upgrade|/test_index/_upgrade).was.removed,.use._reindex.API.instead/"
diff --git a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/indices.put_template/10_basic_compat.yml b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/indices.put_template/10_basic_compat.yml
deleted file mode 100644
index 043e525a8e9b5..0000000000000
--- a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/indices.put_template/10_basic_compat.yml
+++ /dev/null
@@ -1,66 +0,0 @@
----
-setup:
-  - requires:
-      test_runner_features:
-        - "headers"
-        - "warnings"
-
----
-"Put template":
-
-  - do:
-      headers:
-        Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-        Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-      warnings:
-        - "Deprecated field [template] used, replaced by [index_patterns]"
-      indices.put_template:
-        name: test
-        body:
-          template: test-*
-          settings:
-            number_of_shards: 1
-            number_of_replicas: 0
-          mappings:
-            properties:
-              field:
-                type: keyword
-
-  - do:
-      indices.get_template:
-        name: test
-        flat_settings: true
-
-  - match: {test.index_patterns: ["test-*"]}
-  - match: {test.settings: {index.number_of_shards: '1', index.number_of_replicas: '0'}}
-  - match: {test.mappings: {properties: {field: {type: keyword}}}}
-
----
-"Put template (with template parameter)":
-
-  - do:
-      headers:
-        Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-        Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-      warnings:
-        - "Deprecated parameter [template] used, replaced by [index_patterns]"
-      indices.put_template_with_param:
-        name: test
-        template: "test-*"
-        body:
-          settings:
-            number_of_shards: 1
-            number_of_replicas: 0
-          mappings:
-            properties:
-              field:
-                type: keyword
-
-  - do:
-      indices.get_template:
-        name: test
-        flat_settings: true
-
-  - match: {test.index_patterns: ["test-*"]}
-  - match: {test.settings: {index.number_of_shards: '1', index.number_of_replicas: '0'}}
-  - match: {test.mappings: {properties: {field: {type: keyword}}}}
diff --git a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/nodes.hot_threads/10_basic_compat.yml b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/nodes.hot_threads/10_basic_compat.yml
deleted file mode 100644
index c64e80d0f6a03..0000000000000
--- a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/nodes.hot_threads/10_basic_compat.yml
+++ /dev/null
@@ -1,19 +0,0 @@
----
-setup:
-  - requires:
-      test_runner_features:
-        - "headers"
-        - "allowed_warnings_regex"
-
----
-"Get hot threads":
-
-  - do:
-      headers:
-        Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-        Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-      nodes.hot_threads: {}
-      allowed_warnings_regex:
-        - ".*hot_?threads.* is a deprecated endpoint.*"
-  - match:
-      $body: /:::/
diff --git a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search.aggregation/10_moving_avg.yml b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search.aggregation/10_moving_avg.yml
deleted file mode 100644
index c3b3c4320be97..0000000000000
--- a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search.aggregation/10_moving_avg.yml
+++ /dev/null
@@ -1,28 +0,0 @@
----
-setup:
-  - requires:
-      test_runner_features:
-        - "headers"
-
----
-moving_avg agg throws exception:
-  - do:
-      catch: "/Moving Average aggregation usage is not supported. Use the \\[moving_fn\\] aggregation instead./"
-      search:
-        rest_total_hits_as_int: true
-        body:
-          aggs:
-            the_histo:
-              date_histogram:
-                field: "date"
-                calendar_interval: "1d"
-              aggs:
-                the_avg:
-                  avg:
-                    field: "value_field"
-                the_movavg:
-                  moving_avg:
-                    buckets_path: "the_avg"
-      headers:
-        Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-        Accept: "application/vnd.elasticsearch+json;compatible-with=7"
diff --git a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search.sort/10_nested_path_filter.yml b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search.sort/10_nested_path_filter.yml
deleted file mode 100644
index 323a5b9abbf1e..0000000000000
--- a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search.sort/10_nested_path_filter.yml
+++ /dev/null
@@ -1,149 +0,0 @@
----
-setup:
-- skip:
-    features:
-      - "headers"
-      - "allowed_warnings_regex"
-- do:
-    indices.create:
-      index: "my-index"
-      body:
-        settings:
-          number_of_shards: 1
-          number_of_replicas: 0
-        mappings:
-          properties:
-            offer:
-              type: "nested"
-- do:
-    index:
-      index: "my-index"
-      id: "1"
-      refresh: true
-      body:
-        offer:
-          price: 10
-          color: blue
-
-    headers:
-      Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-      Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-
-- do:
-    indices.create:
-      index: "my-locations"
-      body:
-        settings:
-          number_of_shards: 1
-          number_of_replicas: 0
-        mappings:
-          properties:
-            pin:
-              properties:
-                location:
-                  type: geo_point
-            offer:
-              type: "nested"
-    headers:
-      Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-      Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-
-- do:
-    index:
-      index: "my-locations"
-      id: "1"
-      refresh: true
-      body:
-        offer:
-          price: 10
-          color: blue
-        pin:
-          location:
-            lat: 40.12
-            lon: -71.34
-    headers:
-      Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-      Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-
-
-
-
-
----
-"Sort with nested_path throws exception":
-- do:
-    catch: /\[nested_path\] has been removed in favour of the \[nested\] parameter/
-    search:
-      rest_total_hits_as_int: true
-      index: "my-index"
-      body:
-        sort:
-          - offer.price:
-              mode: avg
-              order: asc
-              nested_path: offer
-    headers:
-      Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-      Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-
----
-"Sort with nested_filter throws exception":
-  - do:
-      catch: /\[nested_filter\] has been removed in favour of the \[nested\] parameter/
-      search:
-        rest_total_hits_as_int: true
-        index: "my-index"
-        body:
-          sort:
-            - offer.price:
-                mode: avg
-                order: asc
-                nested_filter:
-                  term:
-                    offer.color: blue
-      headers:
-        Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-        Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-
-
----
-"Geo search with nested_filter throws exception":
-  - do:
-      catch: /\[nested_filter\] has been removed in favour of the \[nested\] parameter/
-      search:
-        rest_total_hits_as_int: true
-        index: "my-locations"
-        body:
-          query:
-            match_all: {}
-          sort:
-            _geo_distance:
-              pin.location:
-                - -70
-                - 40
-              nested_filter:
-                term:
-                  offer.color: blue
-      headers:
-        Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-        Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-
----
-"Geo search with nested_path throws exception":
-  - do:
-      catch: /\[nested_path\] has been removed in favour of the \[nested\] parameter/
-      search:
-        rest_total_hits_as_int: true
-        index: "my-locations"
-        body:
-          query:
-            match_all: {}
-          sort:
-            _geo_distance:
-              pin.location:
-                - -70
-                - 40
-              nested_path: "offer"
-      headers:
-        Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-        Accept: "application/vnd.elasticsearch+json;compatible-with=7"
diff --git a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_cutoff_frequency.yml b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_cutoff_frequency.yml
deleted file mode 100644
index b7df872ff0a86..0000000000000
--- a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_cutoff_frequency.yml
+++ /dev/null
@@ -1,103 +0,0 @@
----
-setup:
-  - requires:
-      test_runner_features:
-        - "headers"
-        - "allowed_warnings_regex"
-  - do:
-      indices.create:
-        index: "test"
-        body:
-          mappings:
-            properties:
-              my_field1:
-                type: "text"
-              my_field2:
-                type: "text"
-      headers:
-        Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-        Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-      allowed_warnings_regex:
-        - "\\[types removal\\].*"
-  - do:
-      index:
-        index: "test"
-        id: "1"
-        body:
-          my_field1: "brown fox jump"
-          my_field2: "xylophone"
-      headers:
-        Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-        Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-      allowed_warnings_regex:
-        - "\\[types removal\\].*"
-  - do:
-      indices.refresh: {}
-      headers:
-        Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-        Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-      allowed_warnings_regex:
-        - "\\[types removal\\].*"
-
----
-multi_match multiple fields with cutoff_frequency throws exception:
-- do:
-    catch: "/cutoff_freqency is not supported. The \\[multi_match\\] query can skip block of documents efficiently if the total number of hits is not tracked/"
-    search:
-      rest_total_hits_as_int: true
-      index: "test"
-      body:
-        query:
-          multi_match:
-            query: "brown"
-            type: "bool_prefix"
-            fields:
-              - "my_field1"
-              - "my_field2"
-            cutoff_frequency: 0.001
-    headers:
-      Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-      Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-    allowed_warnings_regex:
-      - "\\[types removal\\].*"
-
----
-match with cutoff_frequency throws exception:
-  - do:
-      catch: "/cutoff_freqency is not supported. The \\[match\\] query can skip block of documents efficiently if the total number of hits is not tracked/"
-      search:
-        rest_total_hits_as_int: true
-        index: "test"
-        body:
-          query:
-            match:
-              my_field1:
-                query: "brown"
-                type: "bool_prefix"
-                cutoff_frequency: 0.001
-      headers:
-        Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-        Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-      allowed_warnings_regex:
-        - "\\[types removal\\].*"
-
----
-common query throws exception:
-  - do:
-      catch: "/Common Terms Query usage is not supported. Use \\[match\\] query which can efficiently skip blocks of documents if the total number of hits is not tracked./"
-      search:
-        rest_total_hits_as_int: true
-        index: "test"
-        body:
-          query:
-            common:
-              my_field1:
-                query: "brown"
-                type: "bool_prefix"
-                cutoff_frequency: 0.001
-                low_freq_operator: or
-      headers:
-        Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-        Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-      allowed_warnings_regex:
-        - "\\[types removal\\].*"
diff --git a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_geo_bounding_box.yml b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_geo_bounding_box.yml
deleted file mode 100644
index 3f3eac1e59e1a..0000000000000
--- a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_geo_bounding_box.yml
+++ /dev/null
@@ -1,78 +0,0 @@
----
-setup:
-  - requires:
-      test_runner_features:
-        - "headers"
-        - "warnings"
-  - do:
-      indices.create:
-        index: locations
-        body:
-          settings:
-            number_of_shards: 1
-            number_of_replicas: 0
-          mappings:
-
-            properties:
-              location:
-                type: geo_point
-  - do:
-      bulk:
-        index: locations
-        refresh: true
-        body: |
-          {"index":{}}
-          {"location" : {"lat": 13.5, "lon" : 34.89}}
-          {"index":{}}
-          {"location" : {"lat": -7.9, "lon" : 120.78}}
-          {"index":{}}
-          {"location" : {"lat": 45.78, "lon" : -173.45}}
-          {"index":{}}
-          {"location" : {"lat": 32.45, "lon" : 45.6}}
-          {"index":{}}
-          {"location" : {"lat": -63.24, "lon" : 31.0}}
-          {"index":{}}
-          {"location" : {"lat": 0.0, "lon" : 0.0}}
-
-
----
-"geo bounding box query not compatible":
-  - do:
-      catch: /failed to parse \[geo_bounding_box\] query. unexpected field \[type\]/
-      search:
-        index: locations
-        body:
-          query:
-            geo_bounding_box:
-              type : indexed
-              location:
-                top_left:
-                  lat: 10
-                  lon: -10
-                bottom_right:
-                  lat: -10
-                  lon: 10
-
----
-"geo bounding box query compatible":
-  - do:
-      headers:
-        Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-        Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-      warnings:
-        - "Deprecated parameter [type] used, it should no longer be specified."
-      search:
-        index: locations
-        body:
-          query:
-            geo_bounding_box:
-              type : indexed
-              location:
-                top_left:
-                  lat: 10
-                  lon: -10
-                bottom_right:
-                  lat: -10
-                  lon: 10
-  - match: {hits.total.value: 1}
-
diff --git a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_type_query.yml b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_type_query.yml
deleted file mode 100644
index fdaebbb2b81e7..0000000000000
--- a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_type_query.yml
+++ /dev/null
@@ -1,52 +0,0 @@
----
-setup:
-  - skip:
-      features:
-        - "headers"
-        - "allowed_warnings_regex"
----
-type query throws exception when used:
-  - do:
-      index:
-        index: "test1"
-        id: "1"
-        type: "cat"
-        refresh: true
-        body:
-          foo: "bar"
-      headers:
-        Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-        Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-      allowed_warnings_regex:
-        - "\\[types removal\\].*"
-
-  - do:
-      catch: /\[types removal\] Type queries are deprecated, prefer to filter on a field instead./
-      search:
-        rest_total_hits_as_int: true
-        index: "test1"
-        body:
-          query:
-            type:
-              value: "cat"
-      headers:
-        Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-        Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-      allowed_warnings_regex:
-        - "\\[types removal\\].*"
-
-  - do:
-      catch: /\[types removal\] Type queries are deprecated, prefer to filter on a field instead./
-      search:
-        rest_total_hits_as_int: true
-        index: "test1"
-        body:
-          query:
-            type:
-              value: "_doc"
-      headers:
-        Content-Type: "application/vnd.elasticsearch+json;compatible-with=7"
-        Accept: "application/vnd.elasticsearch+json;compatible-with=7"
-      allowed_warnings_regex:
-        - "\\[types removal\\].*"
-
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/MalformedDynamicTemplateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/MalformedDynamicTemplateIT.java
index 937addb473f8b..c80f13861e83f 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/MalformedDynamicTemplateIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/MalformedDynamicTemplateIT.java
@@ -9,6 +9,7 @@ package org.elasticsearch.indices.mapping;
 
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.index.IndexVersion;
 import org.elasticsearch.index.IndexVersions;
 import org.elasticsearch.index.mapper.MapperParsingException;
@@ -33,6 +34,8 @@ protected boolean forbidPrivateIndexSettings() {
  * contains unknown parameters. We were able to create those templates in 7.x still, so we need
  * to be able to index new documents into them. Indexing should issue a deprecation warning though.
  */
+@UpdateForV9
+@AwaitsFix(bugUrl = "this is testing 7.x specific compatibility which may be n/a now after 9.0 bump")
 public void testBWCMalformedDynamicTemplate() {
     // this parameter is not supported by "keyword" field type
     String mapping = """
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java
index f9dc42cb7abe8..1e3a7d5a6b817 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java
@@ -18,6 +18,7 @@
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.core.Strings;
+import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexVersion;
 import org.elasticsearch.index.IndexVersions;
@@ -107,6 +108,8 @@ public void testGetShardSnapshotOnEmptyRepositoriesListThrowsAnError() {
     expectThrows(IllegalArgumentException.class, () -> getLatestSnapshotForShardFuture(Collections.emptyList(), "idx", 0, false));
 }
 
+@UpdateForV9
+// Below we were selecting an index version between 7.5.0 and current; this has been updated to 8.0.0 for now but might need to change
 public void testGetShardSnapshotReturnsTheLatestSuccessfulSnapshot() throws Exception {
     final String repoName = "repo-name";
     final Path repoPath = randomRepoPath();
@@ -114,7 +117,7 @@ public void testGetShardSnapshotReturnsTheLatestSuccessfulSnapshot() throws Exce
     final boolean useBwCFormat = randomBoolean();
     if (useBwCFormat) {
-        final IndexVersion version = randomVersionBetween(random(), IndexVersions.V_7_5_0, IndexVersion.current());
+        final IndexVersion version = randomVersionBetween(random(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current());
         initWithSnapshotVersion(repoName, repoPath, version);
     }
diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java
index 5189094294239..ad50856c556f7 100644
--- a/server/src/main/java/org/elasticsearch/TransportVersions.java
+++ b/server/src/main/java/org/elasticsearch/TransportVersions.java
@@ -266,6 +266,8 @@ static TransportVersion def(int id) {
  * Reference to the earliest compatible transport version to this version of the codebase.
  * This should be the transport version used by the highest minor version of the previous major.
  */
+@UpdateForV9
+// This needs to be bumped to 8.last
 public static final TransportVersion MINIMUM_COMPATIBLE = V_7_17_0;
 
 /**
diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java
index 0164c6b80fa6b..1f3ab73889278 100644
--- a/server/src/main/java/org/elasticsearch/Version.java
+++ b/server/src/main/java/org/elasticsearch/Version.java
@@ -13,8 +13,8 @@
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.core.Assertions;
-import org.elasticsearch.core.RestApiVersion;
 import org.elasticsearch.core.SuppressForbidden;
+import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.monitor.jvm.JvmInfo;
 import org.elasticsearch.xcontent.ToXContentFragment;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -185,7 +185,8 @@ public class Version implements VersionId<Version>, ToXContentFragment {
 public static final Version V_8_15_1 = new Version(8_15_01_99);
 public static final Version V_8_15_2 = new Version(8_15_02_99);
 public static final Version V_8_16_0 = new Version(8_16_00_99);
-public static final Version CURRENT = V_8_16_0;
+public static final Version V_9_0_0 = new Version(9_00_00_99);
+public static final Version CURRENT = V_9_0_0;
 
 private static final NavigableMap<Integer, Version> VERSION_IDS;
 private static final Map<String, Version> VERSION_STRINGS;
@@ -221,14 +222,7 @@ public class Version implements VersionId<Version>, ToXContentFragment {
         }
     }
 }
-assert RestApiVersion.current().major == CURRENT.major && RestApiVersion.previous().major == CURRENT.major - 1
-    : "RestApiVersion must be upgraded "
-        + "to reflect major from Version.CURRENT ["
-        + CURRENT.major
-        + "]"
-        + " but is still set to ["
-        + RestApiVersion.current().major
-        + "]";
+assertRestApiVersion();
 
 builder.put(V_EMPTY_ID, V_EMPTY);
 builderByString.put(V_EMPTY.toString(), V_EMPTY);
@@ -236,6 +230,19 @@
     VERSION_STRINGS = Map.copyOf(builderByString);
 }
 
+@UpdateForV9
+// Re-enable this assertion once the rest api version is bumped
+private static void assertRestApiVersion() {
+    // assert RestApiVersion.current().major == CURRENT.major && RestApiVersion.previous().major == CURRENT.major - 1
+    //     : "RestApiVersion must be upgraded "
+    //         + "to reflect major from Version.CURRENT ["
+    //         + CURRENT.major
+    //         + "]"
+    //         + " but is still set to ["
+    //         + RestApiVersion.current().major
+    //         + "]";
+}
+
 public static Version readVersion(StreamInput in) throws IOException {
     return fromId(in.readVInt());
 }
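For readers who do not have the id scheme in their head, constants such as 9_00_00_99 above follow Elasticsearch's usual layout of two digits each for minor and revision plus a trailing build slot. A hypothetical decoder, written here purely for illustration (the real class does the authoritative decoding in fromId):

    // Hypothetical helper, not part of this patch: decode ids such as
    // 9_00_00_99 (major, 2-digit minor, 2-digit revision, build slot).
    static String describe(int id) {
        int major = id / 1_000_000;
        int minor = (id / 10_000) % 100;
        int revision = (id / 100) % 100;
        return major + "." + minor + "." + revision; // describe(9_00_00_99) -> "9.0.0"
    }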
diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java
index 608d88fdef664..7bc8273eef525 100644
--- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java
+++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java
@@ -166,7 +166,7 @@ private static IndexVersion def(int id, Version luceneVersion) {
  * In branches 8.7-8.11 see server/src/main/java/org/elasticsearch/index/IndexVersion.java for the equivalent definitions.
  */
-public static final IndexVersion MINIMUM_COMPATIBLE = V_7_0_0;
+public static final IndexVersion MINIMUM_COMPATIBLE = V_8_0_0;
 
 static final NavigableMap<Integer, IndexVersion> VERSION_IDS = getAllVersionIds(IndexVersions.class);
 static final IndexVersion LATEST_DEFINED;
@@ -217,8 +217,10 @@ static NavigableMap<Integer, IndexVersion> getAllVersionIds(Class<?> cls) {
     return Collections.unmodifiableNavigableMap(builder);
 }
 
+@UpdateForV9
+// We can simplify this once we've removed all references to index versions earlier than MINIMUM_COMPATIBLE
 static Collection<IndexVersion> getAllVersions() {
-    return VERSION_IDS.values();
+    return VERSION_IDS.values().stream().filter(v -> v.onOrAfter(MINIMUM_COMPATIBLE)).toList();
 }
 
 static final IntFunction<String> VERSION_LOOKUP = ReleaseVersions.generateVersionsLookup(IndexVersions.class, LATEST_DEFINED.id());
diff --git a/server/src/main/java/org/elasticsearch/rest/RestCompatibleVersionHelper.java b/server/src/main/java/org/elasticsearch/rest/RestCompatibleVersionHelper.java
index ca574a5c7eba3..a71394aaaf39f 100644
--- a/server/src/main/java/org/elasticsearch/rest/RestCompatibleVersionHelper.java
+++ b/server/src/main/java/org/elasticsearch/rest/RestCompatibleVersionHelper.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.RestApiVersion;
+import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.xcontent.MediaType;
 import org.elasticsearch.xcontent.ParsedMediaType;
@@ -25,6 +26,7 @@ class RestCompatibleVersionHelper {
 /**
  * @return The requested API version, or {@link Optional#empty()} if there was no explicit version in the request.
  */
+@UpdateForV9
 static Optional<RestApiVersion> getCompatibleVersion(
     @Nullable ParsedMediaType acceptHeader,
     @Nullable ParsedMediaType contentTypeHeader,
@@ -49,7 +51,8 @@ static Optional<RestApiVersion> getCompatibleVersion(
 if (hasContent) {
     // content-type version must be current or prior
-    if (contentTypeVersion > RestApiVersion.current().major || contentTypeVersion < RestApiVersion.minimumSupported().major) {
+    // This can be uncommented once all references to RestApiVersion.V_7 are removed
+    /*if (contentTypeVersion > RestApiVersion.current().major || contentTypeVersion < RestApiVersion.minimumSupported().major) {
         throw new ElasticsearchStatusException(
-            "Content-Type version must be either version {} or {}, but found {}. Content-Type={}",
             RestStatus.BAD_REQUEST,
@@ -58,7 +61,7 @@ static Optional<RestApiVersion> getCompatibleVersion(
             contentTypeVersion,
             contentTypeHeader
         );
-    }
+    }*/
     // if both accept and content-type are sent, the version must match
     if (contentTypeVersion != acceptVersion) {
         throw new ElasticsearchStatusException(
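The helper above decides which REST API major a request is asking for by reading the compatible-with media-type parameter out of the Accept and Content-Type headers. A minimal standalone sketch of that extraction, assumed for illustration only (the real code goes through ParsedMediaType rather than string splitting):

    // Minimal sketch: pull compatible-with out of a header value such as
    // "application/vnd.elasticsearch+json;compatible-with=7".
    static int compatibleWith(String mediaType) {
        for (String part : mediaType.split(";")) {
            String p = part.trim();
            if (p.startsWith("compatible-with=")) {
                return Integer.parseInt(p.substring("compatible-with=".length()));
            }
        }
        return -1; // no explicit version requested
    }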
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/VersionStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/VersionStatsTests.java
index b6f1ac46b4250..4ff99d17195a0 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/VersionStatsTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/VersionStatsTests.java
@@ -54,7 +54,7 @@ protected VersionStats mutateInstance(VersionStats instance) {
     return new VersionStats(instance.versionStats().stream().map(svs -> {
         return switch (randomIntBetween(1, 4)) {
             case 1 -> new VersionStats.SingleVersionStats(
-                IndexVersions.V_7_3_0,
+                IndexVersions.V_8_3_0,
                 svs.indexCount,
                 svs.primaryShardCount,
                 svs.totalPrimaryByteCount
@@ -89,12 +89,12 @@ public void testCreation() {
 
     metadata = new Metadata.Builder().put(indexMeta("foo", IndexVersion.current(), 4), true)
         .put(indexMeta("bar", IndexVersion.current(), 3), true)
-        .put(indexMeta("baz", IndexVersions.V_7_0_0, 2), true)
+        .put(indexMeta("baz", IndexVersions.V_8_0_0, 2), true)
         .build();
     stats = VersionStats.of(metadata, Collections.emptyList());
     assertThat(stats.versionStats().size(), equalTo(2));
     VersionStats.SingleVersionStats s1 = new VersionStats.SingleVersionStats(IndexVersion.current(), 2, 7, 0);
-    VersionStats.SingleVersionStats s2 = new VersionStats.SingleVersionStats(IndexVersions.V_7_0_0, 1, 2, 0);
+    VersionStats.SingleVersionStats s2 = new VersionStats.SingleVersionStats(IndexVersions.V_8_0_0, 1, 2, 0);
     assertThat(stats.versionStats(), containsInAnyOrder(s1, s2));
 
     ShardId shardId = new ShardId("bar", "uuid", 0);
@@ -135,7 +135,7 @@ public void testCreation() {
     stats = VersionStats.of(metadata, Collections.singletonList(nodeResponse));
     assertThat(stats.versionStats().size(), equalTo(2));
     s1 = new VersionStats.SingleVersionStats(IndexVersion.current(), 2, 7, 100);
-    s2 = new VersionStats.SingleVersionStats(IndexVersions.V_7_0_0, 1, 2, 0);
+    s2 = new VersionStats.SingleVersionStats(IndexVersions.V_8_0_0, 1, 2, 0);
     assertThat(stats.versionStats(), containsInAnyOrder(s1, s2));
 }
@@ -144,7 +144,7 @@ private static IndexMetadata indexMeta(String name, IndexVersion version, int pr
 }
 
 public static VersionStats randomInstance() {
-    List<IndexVersion> versions = List.of(IndexVersion.current(), IndexVersions.V_7_0_0, IndexVersions.V_7_1_0, IndexVersions.V_7_2_0);
+    List<IndexVersion> versions = List.of(IndexVersion.current(), IndexVersions.V_8_0_0, IndexVersions.V_8_1_0, IndexVersions.V_8_2_0);
     List<VersionStats.SingleVersionStats> stats = new ArrayList<>();
     for (IndexVersion v : versions) {
         VersionStats.SingleVersionStats s = new VersionStats.SingleVersionStats(
diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponseTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponseTests.java
index c39cc6ebfd665..4edc6ce589486 100644
--- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponseTests.java
+++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponseTests.java
@@ -10,10 +10,8 @@
 import org.elasticsearch.TransportVersion;
 import org.elasticsearch.TransportVersions;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.Randomness;
 import org.elasticsearch.common.UUIDs;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.index.IndexMode;
@@ -23,7 +21,6 @@
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Base64;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
@@ -37,8 +34,6 @@
 import static org.elasticsearch.action.fieldcaps.FieldCapabilitiesIndexResponseTests.randomIndexResponsesWithMappingHash;
 import static org.elasticsearch.action.fieldcaps.FieldCapabilitiesIndexResponseTests.randomIndexResponsesWithoutMappingHash;
 import static org.elasticsearch.action.fieldcaps.FieldCapabilitiesIndexResponseTests.randomMappingHashToIndices;
-import static org.hamcrest.Matchers.anEmptyMap;
-import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.nullValue;
@@ -191,49 +186,6 @@ public void testSerializeNodeResponseBetweenOldNodes() throws IOException {
     }
 }
 
-public void testReadNodeResponseFromPre82() throws Exception {
-    final Version minCompactVersion = Version.CURRENT.minimumCompatibilityVersion();
-    assertTrue("Remove this test once minCompactVersion >= 8.2.0", minCompactVersion.before(Version.V_8_2_0));
-    String base64 = "AwhpbmRleF8wMQIKYmx1ZV9maWVsZApibHVlX2ZpZWxkBGxvbmcAAQEAAAAJcmVkX2ZpZWxkCXJlZF9maWVsZAR0ZXh0AAEAAAAAAQhpbm"
-        + "RleF8wMgAACGluZGV4XzAzAgdfc2VxX25vB19zZXFfbm8EbG9uZwEBAQAAAAx5ZWxsb3dfZmllbGQMeWVsbG93X2ZpZWxkB2tleXdvcmQAAQEAAAABAAEI"
-        + "aW5kZXhfMTAGdXVpZF9hAQ==";
-    StreamInput in = StreamInput.wrap(Base64.getDecoder().decode(base64));
-    in.setTransportVersion(TransportVersions.V_8_1_0);
-    FieldCapabilitiesNodeResponse nodeResp = new FieldCapabilitiesNodeResponse(in);
-    assertThat(nodeResp.getUnmatchedShardIds(), equalTo(Set.of(new ShardId("index_10", "uuid_a", 1))));
-    assertThat(nodeResp.getFailures(), anEmptyMap());
-    assertThat(
-        nodeResp.getIndexResponses(),
-        contains(
-            new FieldCapabilitiesIndexResponse(
-                "index_01",
-                null,
-                Map.of(
-                    "red_field",
-                    new IndexFieldCapabilities("red_field", "text", false, true, false, false, null, Map.of()),
-                    "blue_field",
-                    new IndexFieldCapabilities("blue_field", "long", false, true, true, false, null, Map.of())
-                ),
-                true,
-                IndexMode.STANDARD
-            ),
-            new FieldCapabilitiesIndexResponse("index_02", null, Map.of(), false, IndexMode.STANDARD),
-            new FieldCapabilitiesIndexResponse(
-                "index_03",
-                null,
-                Map.of(
-                    "yellow_field",
-                    new IndexFieldCapabilities("yellow_field", "keyword", false, true, true, false, null, Map.of()),
-                    "_seq_no",
-                    new IndexFieldCapabilities("_seq_no", "long", true, true, true, false, null, Map.of())
-                ),
-                true,
-                IndexMode.STANDARD
-            )
-        )
-    );
-}
-
 private static FieldCapabilitiesNodeResponse randomNodeResponse(List<FieldCapabilitiesIndexResponse> indexResponses) {
     int numUnmatched = randomIntBetween(0, 3);
     final Set<ShardId> unmatchedShardIds = new HashSet<>();
diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java
index cc4d4de1e0f39..bdeef56eca89b 100644
--- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java
+++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java
@@ -11,10 +11,8 @@
 import org.elasticsearch.ElasticsearchExceptionTests;
 import org.elasticsearch.TransportVersion;
 import org.elasticsearch.TransportVersions;
-import org.elasticsearch.Version;
 import org.elasticsearch.common.Randomness;
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.xcontent.ChunkedToXContent;
@@ -29,7 +27,6 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Base64;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
@@ -40,8 +37,6 @@
 import static org.elasticsearch.action.fieldcaps.FieldCapabilitiesIndexResponseTests.randomIndexResponsesWithMappingHash;
 import static org.elasticsearch.action.fieldcaps.FieldCapabilitiesIndexResponseTests.randomIndexResponsesWithoutMappingHash;
 import static org.elasticsearch.action.fieldcaps.FieldCapabilitiesIndexResponseTests.randomMappingHashToIndices;
-import static org.hamcrest.Matchers.contains;
-import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.nullValue;
@@ -246,46 +241,4 @@ public void testSerializeCCSResponseBetweenOldClusters() throws IOException {
         }
     }
 }
-
-public void testReadCCSResponseFromPre82() throws Exception {
-    final Version minCompactVersion = Version.CURRENT.minimumCompatibilityVersion();
-    assertTrue("Remove this test once minCompactVersion >= 8.2.0", minCompactVersion.before(Version.V_8_2_0));
-    String base64 = "AAADCGluZGV4XzAxAgpibHVlX2ZpZWxkCmJsdWVfZmllbGQEbG9uZwABAQAAAAlyZWRfZmllbGQJcmVkX2ZpZWxkBHRleHQAAQAAAAABC"
-        + "GluZGV4XzAyAAAIaW5kZXhfMDMCDHllbGxvd19maWVsZAx5ZWxsb3dfZmllbGQHa2V5d29yZAABAQAAAAdfc2VxX25vB19zZXFfbm8EbG9uZwEBAQAAAA"
-        + "EAAAAAAAAAAAA=";
-    StreamInput in = StreamInput.wrap(Base64.getDecoder().decode(base64));
-    in.setTransportVersion(TransportVersions.V_8_1_0);
-    FieldCapabilitiesResponse nodeResp = new FieldCapabilitiesResponse(in);
-    assertThat(nodeResp.getFailures(), empty());
-    assertThat(
-        nodeResp.getIndexResponses(),
-        contains(
-            new FieldCapabilitiesIndexResponse(
-                "index_01",
-                null,
-                Map.of(
-                    "red_field",
-                    new IndexFieldCapabilities("red_field", "text", false, true, false, false, null, Map.of()),
-                    "blue_field",
-                    new IndexFieldCapabilities("blue_field", "long", false, true, true, false, null, Map.of())
-                ),
-                true,
-                IndexMode.STANDARD
-            ),
-            new FieldCapabilitiesIndexResponse("index_02", null, Map.of(), false, IndexMode.STANDARD),
-            new FieldCapabilitiesIndexResponse(
-                "index_03",
-                null,
-                Map.of(
-                    "yellow_field",
-                    new IndexFieldCapabilities("yellow_field", "keyword", false, true, true, false, null, Map.of()),
-                    "_seq_no",
-                    new IndexFieldCapabilities("_seq_no", "long", true, true, true, false, null, Map.of())
-                ),
-                true,
-                IndexMode.STANDARD
-            )
-        )
-    );
-}
 }
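Both deleted tests relied on the same captured-payload pattern, which stops making sense once the minimum compatible version passes 8.2 (the assertion each of them carried). Schematically, using the types and calls from the deleted code itself:

    // Pattern of the removed BWC tests: decode a payload captured from an
    // 8.1 node, then deserialize it with that version's wire format.
    byte[] payload = Base64.getDecoder().decode(base64);
    StreamInput in = StreamInput.wrap(payload);
    in.setTransportVersion(TransportVersions.V_8_1_0);
    FieldCapabilitiesResponse response = new FieldCapabilitiesResponse(in);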
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java
index 01394a7abbcd5..2190f8f20e762 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java
@@ -39,6 +39,7 @@
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexModule;
 import org.elasticsearch.index.IndexNotFoundException;
@@ -1313,6 +1314,8 @@ public void testRejectTranslogRetentionSettings() {
     );
 }
 
+@UpdateForV9
+@AwaitsFix(bugUrl = "looks like a test that's not applicable to 9.0 after version bump")
 public void testDeprecateTranslogRetentionSettings() {
     request = new CreateIndexClusterStateUpdateRequest("create index", "test", "test");
     final Settings.Builder settings = Settings.builder();
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java
index 955d7d2de6882..0faff63a72682 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java
@@ -33,6 +33,7 @@
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.Predicates;
 import org.elasticsearch.core.SuppressForbidden;
+import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.index.IndexSettings;
@@ -866,20 +867,23 @@ public void testFindMappingsWithFilters() throws IOException {
 
 public void testOldestIndexComputation() {
     Metadata metadata = buildIndicesWithVersions(
-        IndexVersions.V_7_0_0,
+        IndexVersions.MINIMUM_COMPATIBLE,
         IndexVersion.current(),
         IndexVersion.fromId(IndexVersion.current().id() + 1)
     ).build();
 
-    assertEquals(IndexVersions.V_7_0_0, metadata.oldestIndexVersion());
+    assertEquals(IndexVersions.MINIMUM_COMPATIBLE, metadata.oldestIndexVersion());
 
     Metadata.Builder b = Metadata.builder();
     assertEquals(IndexVersion.current(), b.build().oldestIndexVersion());
 
     Throwable ex = expectThrows(
         IllegalArgumentException.class,
-        () -> buildIndicesWithVersions(IndexVersions.V_7_0_0, IndexVersions.ZERO, IndexVersion.fromId(IndexVersion.current().id() + 1))
-            .build()
+        () -> buildIndicesWithVersions(
+            IndexVersions.MINIMUM_COMPATIBLE,
+            IndexVersions.ZERO,
+            IndexVersion.fromId(IndexVersion.current().id() + 1)
+        ).build()
     );
 
     assertEquals("[index.version.created] is not present in the index settings for index with UUID [null]", ex.getMessage());
@@ -1962,6 +1966,8 @@ public void testHiddenAliasValidation() {
     }
 }
 
+@UpdateForV9
+@AwaitsFix(bugUrl = "this test needs to be updated or removed after the version 9.0 bump")
 public void testSystemAliasValidationMixedVersionSystemAndRegularFails() {
     final IndexVersion random7xVersion = IndexVersionUtils.randomVersionBetween(
         random(),
@@ -2012,6 +2018,8 @@ public void testSystemAliasValidationNewSystemAndRegularFails() {
     );
 }
 
+@UpdateForV9
+@AwaitsFix(bugUrl = "this test needs to be updated or removed after the version 9.0 bump")
 public void testSystemAliasOldSystemAndNewRegular() {
     final IndexVersion random7xVersion = IndexVersionUtils.randomVersionBetween(
         random(),
@@ -2025,6 +2033,8 @@ public void testSystemAliasOldSystemAndNewRegular() {
     metadataWithIndices(oldVersionSystem, regularIndex);
 }
 
+@UpdateForV9
+@AwaitsFix(bugUrl = "this test needs to be updated or removed after the version 9.0 bump")
 public void testSystemIndexValidationAllRegular() {
     final IndexVersion random7xVersion = IndexVersionUtils.randomVersionBetween(
         random(),
@@ -2039,6 +2049,8 @@ public void testSystemIndexValidationAllRegular() {
     metadataWithIndices(currentVersionSystem, currentVersionSystem2, oldVersionSystem);
 }
 
+@UpdateForV9
+@AwaitsFix(bugUrl = "this test needs to be updated or removed after the version 9.0 bump")
 public void testSystemAliasValidationAllSystemSomeOld() {
     final IndexVersion random7xVersion = IndexVersionUtils.randomVersionBetween(
         random(),
diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/CompatibleNamedXContentRegistryTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/CompatibleNamedXContentRegistryTests.java
index aa62553447db0..47da6f8cdc0f8 100644
--- a/server/src/test/java/org/elasticsearch/common/xcontent/CompatibleNamedXContentRegistryTests.java
+++ b/server/src/test/java/org/elasticsearch/common/xcontent/CompatibleNamedXContentRegistryTests.java
@@ -11,6 +11,7 @@
 import org.elasticsearch.Version;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.core.RestApiVersion;
+import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.rest.FakeRestRequest;
@@ -114,6 +115,8 @@ public static NewSubObject parse(XContentParser parser) {
     }
 }
 
+@UpdateForV9
+@AwaitsFix(bugUrl = "this can be re-enabled once our rest api version is bumped to V_9")
 public void testNotCompatibleRequest() throws IOException {
     NamedXContentRegistry registry = new NamedXContentRegistry(
         List.of(
diff --git a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java
index adab51a37d2bf..b7dea50ee4386 100644
--- a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java
+++ b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java
@@ -29,6 +29,7 @@
 import org.elasticsearch.core.IOUtils;
 import org.elasticsearch.core.PathUtils;
 import org.elasticsearch.core.SuppressForbidden;
+import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.gateway.MetadataStateFormat;
 import org.elasticsearch.gateway.PersistedClusterStateService;
 import org.elasticsearch.index.Index;
@@ -537,6 +538,8 @@ public void testBlocksDowngradeToVersionWithMultipleNodesInDataPath() throws IOE
     }
 }
 
+@UpdateForV9
+@AwaitsFix(bugUrl = "test won't work until we remove and bump minimum index versions")
 public void testIndexCompatibilityChecks() throws IOException {
     final Settings settings = buildEnvSettings(Settings.EMPTY);
@@ -634,6 +637,8 @@ public void testSymlinkDataDirectory() throws Exception {
     env.close();
 }
 
+@UpdateForV9
+@AwaitsFix(bugUrl = "test won't work until we remove and bump minimum index versions")
 public void testGetBestDowngradeVersion() {
     assertThat(NodeEnvironment.getBestDowngradeVersion("7.17.0"), Matchers.equalTo("7.17.0"));
     assertThat(NodeEnvironment.getBestDowngradeVersion("7.17.5"), Matchers.equalTo("7.17.5"));
diff --git a/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java b/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java
index f60812977d578..499861ceb346d 100644
--- a/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java
+++ b/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java
@@ -10,6 +10,7 @@
 import org.elasticsearch.Build;
 import org.elasticsearch.Version;
 import org.elasticsearch.core.Tuple;
+import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.gateway.MetadataStateFormat;
 import org.elasticsearch.index.IndexVersion;
 import org.elasticsearch.index.IndexVersions;
@@ -78,6 +79,8 @@ public void testEqualsHashcodeSerialization() {
     );
 }
 
+@UpdateForV9
+@AwaitsFix(bugUrl = "as mentioned in the comment below, the behavior here is changing for 9.0 so this test needs updating")
 public void testReadsFormatWithoutVersion() throws IOException {
     // the behaviour tested here is only appropriate if the current version is compatible with versions 7 and earlier
     assertTrue(IndexVersions.MINIMUM_COMPATIBLE.onOrBefore(IndexVersions.V_7_0_0));
@@ -151,6 +154,8 @@ public void testDoesNotUpgradeAncientVersion() {
     );
 }
 
+@UpdateForV9
+@AwaitsFix(bugUrl = "Needs to be updated for 9.0 version bump")
 public void testUpgradeMarksPreviousVersion() {
     final String nodeId = randomAlphaOfLength(10);
     final Version version = VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(), Version.V_8_0_0);
diff --git a/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java b/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java
index 379adc9ce517a..b2db13c1481ec 100644
--- a/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java
+++ b/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java
@@ -159,20 +159,6 @@ public void testSortingAgainstAliases() {
     assertEquals("Cannot use alias [field] as an index sort field", e.getMessage());
 }
 
-public void testSortingAgainstAliasesPre713() {
-    IndexSettings indexSettings = indexSettings(
-        Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersions.V_7_12_0).put("index.sort.field", "field").build()
-    );
-    MappedFieldType aliased = new KeywordFieldMapper.KeywordFieldType("aliased");
-    Sort sort = buildIndexSort(indexSettings, Map.of("field", aliased));
-    assertThat(sort.getSort(), arrayWithSize(1));
-    assertThat(sort.getSort()[0].getField(), equalTo("aliased"));
-    assertWarnings(
-        "Index sort for index [test] defined on field [field] which resolves to field [aliased]. "
-            + "You will not be able to define an index sort over aliased fields in new indexes"
-    );
-}
-
 public void testTimeSeriesMode() {
     IndexSettings indexSettings = indexSettings(
         Settings.builder()
diff --git a/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java b/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java
index dcf73ec617e60..d37d03407f691 100644
--- a/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java
+++ b/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java
@@ -10,6 +10,7 @@
 
 import org.apache.lucene.util.Version;
 import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.core.UpdateForV9;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.index.IndexVersionUtils;
 import org.hamcrest.Matchers;
@@ -32,27 +33,27 @@ public class IndexVersionTests extends ESTestCase {
 
 public void testVersionComparison() {
-    IndexVersion V_7_2_0 = IndexVersions.V_7_2_0;
-    IndexVersion V_8_0_0 = IndexVersions.V_8_0_0;
-    assertThat(V_7_2_0.before(V_8_0_0), is(true));
-    assertThat(V_7_2_0.before(V_7_2_0), is(false));
-    assertThat(V_8_0_0.before(V_7_2_0), is(false));
-
-    assertThat(V_7_2_0.onOrBefore(V_8_0_0), is(true));
-    assertThat(V_7_2_0.onOrBefore(V_7_2_0), is(true));
-    assertThat(V_8_0_0.onOrBefore(V_7_2_0), is(false));
-
-    assertThat(V_7_2_0.after(V_8_0_0), is(false));
-    assertThat(V_7_2_0.after(V_7_2_0), is(false));
-    assertThat(V_8_0_0.after(V_7_2_0), is(true));
-
-    assertThat(V_7_2_0.onOrAfter(V_8_0_0), is(false));
-    assertThat(V_7_2_0.onOrAfter(V_7_2_0), is(true));
-    assertThat(V_8_0_0.onOrAfter(V_7_2_0), is(true));
-
-    assertThat(V_7_2_0, is(lessThan(V_8_0_0)));
-    assertThat(V_7_2_0.compareTo(V_7_2_0), is(0));
-    assertThat(V_8_0_0, is(greaterThan(V_7_2_0)));
+    IndexVersion V_8_2_0 = IndexVersions.V_8_2_0;
+    IndexVersion current = IndexVersion.current();
+    assertThat(V_8_2_0.before(current), is(true));
+    assertThat(V_8_2_0.before(V_8_2_0), is(false));
+    assertThat(current.before(V_8_2_0), is(false));
+
+    assertThat(V_8_2_0.onOrBefore(current), is(true));
+    assertThat(V_8_2_0.onOrBefore(V_8_2_0), is(true));
+    assertThat(current.onOrBefore(V_8_2_0), is(false));
+
+    assertThat(V_8_2_0.after(current), is(false));
+    assertThat(V_8_2_0.after(V_8_2_0), is(false));
+    assertThat(current.after(V_8_2_0), is(true));
+
+    assertThat(V_8_2_0.onOrAfter(current), is(false));
+    assertThat(V_8_2_0.onOrAfter(V_8_2_0), is(true));
+    assertThat(current.onOrAfter(V_8_2_0), is(true));
+
+    assertThat(V_8_2_0, is(lessThan(current)));
+    assertThat(V_8_2_0.compareTo(V_8_2_0), is(0));
+    assertThat(current, is(greaterThan(V_8_2_0)));
 }
 
 public static class CorrectFakeVersion {
@@ -149,6 +150,8 @@ public void testMax() {
     }
 }
 
+@UpdateForV9
+@AwaitsFix(bugUrl = "believe this fails because index version has not yet been bumped to 9.0")
 public void testMinimumCompatibleVersion() {
     assertThat(IndexVersion.getMinimumCompatibleIndexVersion(7170099), equalTo(IndexVersion.fromId(6000099)));
     assertThat(IndexVersion.getMinimumCompatibleIndexVersion(8000099), equalTo(IndexVersion.fromId(7000099)));
@@ -189,6 +192,8 @@ public void testParseLenient() {
     }
 }
 
+@UpdateForV9
+@AwaitsFix(bugUrl = "can be unmuted once lucene is bumped to version 10")
 public void testLuceneVersionOnUnknownVersions() {
     // between two known versions, should use the lucene version of the previous version
     IndexVersion previousVersion = IndexVersionUtils.getPreviousVersion();
b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 47d75c6d0bd13..818dafcfeeda1 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -175,7 +175,6 @@ import java.util.function.ToLongBiFunction; import java.util.stream.Collectors; import java.util.stream.LongStream; -import java.util.stream.StreamSupport; import static java.util.Collections.shuffle; import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; @@ -6616,117 +6615,6 @@ public void testRebuildLocalCheckpointTrackerAndVersionMap() throws Exception { } } - public void testRecoverFromHardDeletesIndex() throws Exception { - IndexWriterFactory hardDeletesWriter = (directory, iwc) -> new IndexWriter(directory, iwc) { - boolean isTombstone(Iterable doc) { - return StreamSupport.stream(doc.spliterator(), false).anyMatch(d -> d.name().equals(Lucene.SOFT_DELETES_FIELD)); - } - - @Override - public long addDocument(Iterable doc) throws IOException { - if (isTombstone(doc)) { - return 0; - } - return super.addDocument(doc); - } - - @Override - public long addDocuments(Iterable> docs) throws IOException { - if (StreamSupport.stream(docs.spliterator(), false).anyMatch(this::isTombstone)) { - return 0; - } - return super.addDocuments(docs); - } - - @Override - public long softUpdateDocument(Term term, Iterable doc, Field... softDeletes) throws IOException { - if (isTombstone(doc)) { - return super.deleteDocuments(term); - } else { - return super.updateDocument(term, doc); - } - } - - @Override - public long softUpdateDocuments(Term term, Iterable> docs, Field... softDeletes) - throws IOException { - if (StreamSupport.stream(docs.spliterator(), false).anyMatch(this::isTombstone)) { - return super.deleteDocuments(term); - } else { - return super.updateDocuments(term, docs); - } - } - }; - final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); - Path translogPath = createTempDir(); - List operations = generateHistoryOnReplica(between(1, 500), randomBoolean(), randomBoolean(), randomBoolean()); - final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()) - .settings( - Settings.builder() - .put(defaultSettings.getSettings()) - .put( - IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0) - ) - .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false) - ) - .build(); - final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata); - try (Store store = createStore()) { - EngineConfig config = config(indexSettings, store, translogPath, NoMergePolicy.INSTANCE, null, null, globalCheckpoint::get); - final List docs; - try ( - InternalEngine hardDeletesEngine = createEngine( - indexSettings, - store, - translogPath, - newMergePolicy(), - hardDeletesWriter, - null, - globalCheckpoint::get - ) - ) { - for (Engine.Operation op : operations) { - applyOperation(hardDeletesEngine, op); - if (randomBoolean()) { - hardDeletesEngine.syncTranslog(); - globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), hardDeletesEngine.getPersistedLocalCheckpoint())); - } - if (randomInt(100) < 10) { - hardDeletesEngine.refresh("test"); - } - if (randomInt(100) < 5) { - hardDeletesEngine.flush(true, true); - } - } - docs = getDocIds(hardDeletesEngine, true); - } - // We need to remove min_retained_seq_no commit tag as the 
actual hard-deletes engine does not have it. - store.trimUnsafeCommits(translogPath); - Map userData = new HashMap<>(store.readLastCommittedSegmentsInfo().userData); - userData.remove(Engine.MIN_RETAINED_SEQNO); - IndexWriterConfig indexWriterConfig = new IndexWriterConfig(null).setOpenMode(IndexWriterConfig.OpenMode.APPEND) - .setIndexCreatedVersionMajor(IndexVersion.current().luceneVersion().major) - .setSoftDeletesField(Lucene.SOFT_DELETES_FIELD) - .setCommitOnClose(false) - .setMergePolicy(NoMergePolicy.INSTANCE); - try (IndexWriter writer = new IndexWriter(store.directory(), indexWriterConfig)) { - writer.setLiveCommitData(userData.entrySet()); - writer.commit(); - } - try (InternalEngine softDeletesEngine = new InternalEngine(config)) { // do not recover from translog - assertThat(softDeletesEngine.getLastCommittedSegmentInfos().userData, equalTo(userData)); - assertThat(softDeletesEngine.getVersionMap().keySet(), empty()); - recoverFromTranslog(softDeletesEngine, translogHandler, Long.MAX_VALUE); - if (randomBoolean()) { - engine.forceMerge(randomBoolean(), 1, false, UUIDs.randomBase64UUID()); - } - assertThat(getDocIds(softDeletesEngine, true), equalTo(docs)); - assertConsistentHistoryBetweenTranslogAndLuceneIndex(softDeletesEngine); - } - } - } - void assertLuceneOperations(InternalEngine engine, long expectedAppends, long expectedUpdates, long expectedDeletes) { String message = "Lucene operations mismatched;" + " appends [actual:" @@ -7500,14 +7388,14 @@ public void testTrimUnsafeCommitHasESVersionInUserData() throws IOException { .setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE); try (IndexWriter indexWriter = new IndexWriter(store.directory(), indexWriterConfig)) { Map commitUserDataWithOlderVersion = new HashMap<>(committedSegmentsInfo.userData); - commitUserDataWithOlderVersion.put(ES_VERSION, IndexVersions.V_7_0_0.toString()); + commitUserDataWithOlderVersion.put(ES_VERSION, IndexVersions.MINIMUM_COMPATIBLE.toString()); indexWriter.setLiveCommitData(commitUserDataWithOlderVersion.entrySet()); indexWriter.commit(); } Map userDataBeforeTrimUnsafeCommits = store.readLastCommittedSegmentsInfo().getUserData(); assertThat(userDataBeforeTrimUnsafeCommits, hasKey(ES_VERSION)); - assertThat(userDataBeforeTrimUnsafeCommits.get(ES_VERSION), is(equalTo(IndexVersions.V_7_0_0.toString()))); + assertThat(userDataBeforeTrimUnsafeCommits.get(ES_VERSION), is(equalTo(IndexVersions.MINIMUM_COMPATIBLE.toString()))); store.trimUnsafeCommits(config.getTranslogConfig().getTranslogPath()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java index aa28e8a3bd5b4..ff4f7e3c2e52e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java @@ -19,9 +19,7 @@ import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType; import org.elasticsearch.script.DateFieldScript; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -45,7 +43,6 @@ import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; -import static org.mockito.Mockito.mock; public class 
DateFieldMapperTests extends MapperTestCase { @@ -247,10 +244,6 @@ public void testBadNullValue() throws IOException { + "failed to parse date field [foo] with format [strict_date_optional_time||epoch_millis]" ) ); - - createDocumentMapper(IndexVersions.V_7_9_0, fieldMapping(b -> b.field("type", "date").field("null_value", "foo"))); - - assertWarnings("Error parsing [foo] as date in [null_value] on field [field]); [null_value] will be ignored"); } public void testNullConfigValuesFail() { @@ -753,51 +746,4 @@ public void testLegacyField() throws Exception { assertNotEquals(DEFAULT_DATE_TIME_FORMATTER, ((DateFieldType) service.fieldType("mydate")).dateTimeFormatter); } - public void testLegacyDateFormatName() { - DateFieldMapper.Builder builder = new DateFieldMapper.Builder( - "format", - DateFieldMapper.Resolution.MILLISECONDS, - null, - mock(ScriptService.class), - true, - // BWC compatible index, e.g 7.x - IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) - ) - ); - - // Check that we allow the use of camel case date formats on 7.x indices - @SuppressWarnings("unchecked") - FieldMapper.Parameter formatParam = (FieldMapper.Parameter) builder.getParameters()[3]; - formatParam.parse("date_time_format", mock(MappingParserContext.class), "strictDateOptionalTime"); - builder.buildFormatter(); // shouldn't throw exception - - formatParam.parse("date_time_format", mock(MappingParserContext.class), "strictDateOptionalTime||strictDateOptionalTimeNanos"); - builder.buildFormatter(); // shouldn't throw exception - - DateFieldMapper.Builder newFieldBuilder = new DateFieldMapper.Builder( - "format", - DateFieldMapper.Resolution.MILLISECONDS, - null, - mock(ScriptService.class), - true, - IndexVersion.current() - ); - - @SuppressWarnings("unchecked") - final FieldMapper.Parameter newFormatParam = (FieldMapper.Parameter) newFieldBuilder.getParameters()[3]; - - // Check that we don't allow the use of camel case date formats on 8.x indices - assertEquals( - "Error parsing [format] on field [format]: Invalid format: [strictDateOptionalTime]: Unknown pattern letter: t", - expectThrows(IllegalArgumentException.class, () -> { - newFormatParam.parse("date_time_format", mock(MappingParserContext.class), "strictDateOptionalTime"); - assertEquals("strictDateOptionalTime", newFormatParam.getValue()); - newFieldBuilder.buildFormatter(); - }).getMessage() - ); - - } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java index a5a5d9726f233..4d6e730afded0 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java @@ -20,10 +20,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator; import org.elasticsearch.test.XContentTestUtils; -import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; @@ -221,61 +219,25 @@ public void testSimpleWithXContentTraverse() throws Exception { } public void testDynamicMapperWithBadMapping() throws 
IOException { - { - // in 7.x versions this will issue a deprecation warning - IndexVersion version = IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) - ); - DocumentMapper mapper = createDocumentMapper(version, topMapping(b -> { - b.startArray("dynamic_templates"); - { - b.startObject(); - { - b.startObject("test"); - { - b.field("match_mapping_type", "string"); - b.startObject("mapping").field("badparam", false).endObject(); - } - b.endObject(); - } - b.endObject(); - } - b.endArray(); - })); - assertWarnings( - "Parameter [badparam] is used in a dynamic template mapping and has no effect on type [null]. " - + "Usage will result in an error in future major versions and should be removed." - ); - mapper.parse(source(b -> b.field("field", "foo"))); - assertWarnings( - "Parameter [badparam] is used in a dynamic template mapping and has no effect on type [null]. " - + "Usage will result in an error in future major versions and should be removed." - ); - } - - { - // in 8.x it will error out - Exception e = expectThrows(MapperParsingException.class, () -> createMapperService(topMapping(b -> { - b.startArray("dynamic_templates"); + // in 8.x it will error out + Exception e = expectThrows(MapperParsingException.class, () -> createMapperService(topMapping(b -> { + b.startArray("dynamic_templates"); + { + b.startObject(); { - b.startObject(); + b.startObject("test"); { - b.startObject("test"); - { - b.field("match_mapping_type", "string"); - b.startObject("mapping").field("badparam", false).endObject(); - } - b.endObject(); + b.field("match_mapping_type", "string"); + b.startObject("mapping").field("badparam", false).endObject(); } b.endObject(); } - b.endArray(); - }))); - assertThat(e.getMessage(), containsString("dynamic template [test] has invalid content")); - assertThat(e.getCause().getMessage(), containsString("badparam")); - } + b.endObject(); + } + b.endArray(); + }))); + assertThat(e.getMessage(), containsString("dynamic template [test] has invalid content")); + assertThat(e.getCause().getMessage(), containsString("badparam")); } public void testDynamicRuntimeWithBadMapping() { @@ -677,35 +639,6 @@ public void testIllegalDynamicTemplateNoMappingTypeRuntime() throws Exception { assertEquals("unknown parameter [foo] on runtime field [__dynamic__my_template] of type [date]", e.getRootCause().getMessage()); } - public void testIllegalDynamicTemplate7DotXIndex() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder(); - mapping.startObject(); - { - mapping.startObject(MapperService.SINGLE_MAPPING_NAME); - mapping.startArray("dynamic_templates"); - { - mapping.startObject(); - mapping.startObject("my_template"); - mapping.field("match_mapping_type", "string"); - mapping.startObject("mapping"); - mapping.field("type", "string"); - mapping.endObject(); - mapping.endObject(); - mapping.endObject(); - } - mapping.endArray(); - mapping.endObject(); - } - mapping.endObject(); - IndexVersion createdVersion = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_7_0); - MapperService mapperService = createMapperService(createdVersion, mapping); - assertThat(mapperService.documentMapper().mappingSource().toString(), containsString("\"type\":\"string\"")); - assertWarnings(""" - dynamic template [my_template] has invalid content \ - [{"match_mapping_type":"string","mapping":{"type":"string"}}], attempted to validate it \ - with the following 
match_mapping_type: [string], last error: [No mapper found for type [string]]"""); - } - public void testTemplateWithoutMatchPredicates() throws Exception { XContentBuilder mapping = XContentFactory.jsonBuilder(); mapping.startObject(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java index 0b5fef2b5971c..11544d81a6914 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java @@ -9,9 +9,7 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.termvectors.TermVectorsService; -import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; @@ -80,39 +78,4 @@ public void testUsingEnabledSettingThrows() { ); } - /** - * disabling the _field_names should still work for indices before 8.0 - */ - public void testUsingEnabledBefore8() throws Exception { - - DocumentMapper docMapper = createDocumentMapper( - IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0), - topMapping(b -> b.startObject("_field_names").field("enabled", false).endObject()) - ); - - assertWarnings(FieldNamesFieldMapper.ENABLED_DEPRECATION_MESSAGE); - FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class); - assertFalse(fieldNamesMapper.fieldType().isEnabled()); - - ParsedDocument doc = docMapper.parse(source(b -> b.field("field", "value"))); - assertNull(doc.rootDoc().get("_field_names")); - } - - /** - * Merging the "_field_names" enabled setting is forbidden in 8.0, but we still want to tests the behavior on pre-8 indices - */ - public void testMergingMappingsBefore8() throws Exception { - MapperService mapperService = createMapperService( - IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0), - mapping(b -> {}) - ); - - merge(mapperService, topMapping(b -> b.startObject("_field_names").field("enabled", false).endObject())); - assertFalse(mapperService.documentMapper().metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled()); - assertWarnings(FieldNamesFieldMapper.ENABLED_DEPRECATION_MESSAGE); - - merge(mapperService, topMapping(b -> b.startObject("_field_names").field("enabled", true).endObject())); - assertTrue(mapperService.documentMapper().metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled()); - assertWarnings(FieldNamesFieldMapper.ENABLED_DEPRECATION_MESSAGE); - } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java index a38775e76c689..1fc5b370e4614 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.script.IpFieldScript; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; @@ -208,12 +207,6 @@ public void testNullValue() throws IOException { 
e.getMessage(), "Failed to parse mapping: Error parsing [null_value] on field [field]: ':1' is not an IP string literal." ); - - createDocumentMapper(IndexVersions.V_7_9_0, fieldMapping(b -> { - b.field("type", "ip"); - b.field("null_value", ":1"); - })); - assertWarnings("Error parsing [:1] as IP in [null_value] on field [field]); [null_value] will be ignored"); } public void testDimension() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java index 4a9791fce7496..5579a8522a450 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -26,8 +27,6 @@ import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptCompiler; -import org.elasticsearch.test.TransportVersionUtils; -import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; @@ -547,37 +546,6 @@ public void testDeprecatedParameterName() { {"field":{"type":"test_mapper","fixed2":true,"required":"value"}}""", Strings.toString(mapper)); } - /** - * test parsing mapping from dynamic templates, should ignore unknown parameters for bwc and log warning before 8.0.0 - */ - public void testBWCunknownParametersfromDynamicTemplates() { - String mapping = """ - {"type":"test_mapper","some_unknown_parameter":true,"required":"value"}"""; - TestMapper mapper = fromMapping( - mapping, - IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0), - TransportVersionUtils.randomVersionBetween( - random(), - TransportVersions.V_7_0_0, - TransportVersionUtils.getPreviousVersion(TransportVersions.V_8_0_0) - ), - true - ); - assertNotNull(mapper); - assertWarnings( - "Parameter [some_unknown_parameter] is used in a dynamic template mapping and has no effect on type [test_mapper]. " - + "Usage will result in an error in future major versions and should be removed." 
- ); - assertEquals(""" - {"field":{"type":"test_mapper","required":"value"}}""", Strings.toString(mapper)); - - MapperParsingException ex = expectThrows( - MapperParsingException.class, - () -> fromMapping(mapping, IndexVersions.V_8_0_0, TransportVersions.V_8_0_0, true) - ); - assertEquals("unknown parameter [some_unknown_parameter] on mapper [field] of type [test_mapper]", ex.getMessage()); - } - public void testAnalyzers() { String mapping = """ {"type":"test_mapper","analyzer":"_standard","required":"value"}"""; @@ -609,6 +577,8 @@ public void testAnalyzers() { ); } + @UpdateForV9 + @AwaitsFix(bugUrl = "this is testing legacy functionality so can likely be removed in 9.0") public void testDeprecatedParameters() { // 'index' is declared explicitly, 'store' is not, but is one of the previously always-accepted params String mapping = """ diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java index 035466d93ab06..8013a93581757 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java @@ -70,9 +70,6 @@ public void testMultiFieldWithinMultiField() throws IOException { Mapper.TypeParser typeParser = KeywordFieldMapper.PARSER; - // For indices created prior to 8.0, we should only emit a warning and not fail parsing. - Map fieldNode = XContentHelper.convertToMap(BytesReference.bytes(mapping), true, mapping.contentType()).v2(); - MapperService mapperService = mock(MapperService.class); IndexAnalyzers indexAnalyzers = IndexAnalyzers.of(defaultAnalyzers()); when(mapperService.getIndexAnalyzers()).thenReturn(indexAnalyzers); @@ -86,32 +83,6 @@ public void testMultiFieldWithinMultiField() throws IOException { IndexSettings indexSettings = new IndexSettings(metadata, Settings.EMPTY); when(mapperService.getIndexSettings()).thenReturn(indexSettings); - IndexVersion olderVersion = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); - MappingParserContext olderContext = new MappingParserContext( - null, - type -> typeParser, - type -> null, - olderVersion, - () -> TransportVersions.MINIMUM_COMPATIBLE, - null, - ScriptCompiler.NONE, - mapperService.getIndexAnalyzers(), - mapperService.getIndexSettings(), - ProvidedIdFieldMapper.NO_FIELD_DATA, - query -> { - throw new UnsupportedOperationException(); - } - ); - - TextFieldMapper.PARSER.parse("some-field", fieldNode, olderContext); - assertWarnings( - "At least one multi-field, [sub-field], " - + "was encountered that itself contains a multi-field. Defining multi-fields within a multi-field is deprecated " - + "and is not supported for indices created in 8.0 and later. To migrate the mappings, all instances of [fields] " - + "that occur within a [fields] block should be removed from the mappings, either by flattening the chained " - + "[fields] blocks into a single level, or switching to [copy_to] if appropriate." - ); - // For indices created in 8.0 or later, we should throw an error. 
Map fieldNodeCopy = XContentHelper.convertToMap(BytesReference.bytes(mapping), true, mapping.contentType()).v2(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/BinaryDenseVectorScriptDocValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/BinaryDenseVectorScriptDocValuesTests.java index 1df42368041ac..a3fd002e68a1c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/BinaryDenseVectorScriptDocValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/BinaryDenseVectorScriptDocValuesTests.java @@ -11,13 +11,13 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; import org.elasticsearch.script.field.vectors.BinaryDenseVectorDocValuesField; import org.elasticsearch.script.field.vectors.ByteBinaryDenseVectorDocValuesField; import org.elasticsearch.script.field.vectors.DenseVector; import org.elasticsearch.script.field.vectors.DenseVectorDocValuesField; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.index.IndexVersionUtils; import java.io.IOException; import java.nio.ByteBuffer; @@ -32,7 +32,7 @@ public void testFloatGetVectorValueAndGetMagnitude() throws IOException { float[][] vectors = { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }; float[] expectedMagnitudes = { 1.7320f, 2.4495f, 3.3166f }; - for (IndexVersion indexVersion : List.of(IndexVersions.V_7_4_0, IndexVersion.current())) { + for (IndexVersion indexVersion : List.of(IndexVersionUtils.randomCompatibleVersion(random()), IndexVersion.current())) { BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT, indexVersion); DenseVectorDocValuesField field = new BinaryDenseVectorDocValuesField(docValues, "test", ElementType.FLOAT, dims, indexVersion); DenseVectorScriptDocValues scriptDocValues = field.toScriptDocValues(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index 83b8a8fa991c2..ad719e398ae37 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -1321,24 +1321,6 @@ public void testDefaultParamsIndexByDefault() throws Exception { assertEquals(VectorSimilarity.COSINE, denseVectorFieldType.getSimilarity()); } - public void testAddDocumentsToIndexBefore_V_7_5_0() throws Exception { - IndexVersion indexVersion = IndexVersions.V_7_4_0; - DocumentMapper mapper = createDocumentMapper( - indexVersion, - fieldMapping(b -> b.field("index", false).field("type", "dense_vector").field("dims", 3)) - ); - - float[] validVector = { -12.1f, 100.7f, -4 }; - ParsedDocument doc1 = mapper.parse(source(b -> b.array("field", validVector))); - List fields = doc1.rootDoc().getFields("field"); - assertEquals(1, fields.size()); - assertThat(fields.get(0), instanceOf(BinaryDocValuesField.class)); - // assert that after decoding the indexed value is equal to expected - BytesRef vectorBR = fields.get(0).binaryValue(); - float[] decodedValues = decodeDenseVector(indexVersion, vectorBR); - assertArrayEquals("Decoded dense vector values is not equal to the indexed one.", validVector, decodedValues, 0.001f); - } - public void 
testValidateOnBuild() { final MapperBuilderContext context = MapperBuilderContext.root(false, false); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java index 9cfbbad5ebf50..2bfd8740a971c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.document.FeatureField; import org.apache.lucene.index.IndexableField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DocumentMapper; @@ -22,11 +21,8 @@ import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperTestCase; import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xcontent.XContentType; import org.hamcrest.Matchers; import org.junit.AssumptionViolatedException; @@ -226,44 +222,6 @@ protected IndexVersion boostNotAllowedIndexVersion() { return NEW_SPARSE_VECTOR_INDEX_VERSION; } - public void testSparseVectorWith7xIndex() throws Exception { - IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), PREVIOUS_SPARSE_VECTOR_INDEX_VERSION); - - XContentBuilder builder = XContentFactory.jsonBuilder() - .startObject() - .startObject("_doc") - .startObject("properties") - .startObject("my-vector") - .field("type", "sparse_vector") - .endObject() - .endObject() - .endObject() - .endObject(); - - DocumentMapper mapper = createDocumentMapper(version, builder); - assertWarnings(SparseVectorFieldMapper.ERROR_MESSAGE_7X); - - // Check that new vectors cannot be indexed. 
- int[] indexedDims = { 65535, 50, 2 }; - float[] indexedValues = { 0.5f, 1800f, -34567.11f }; - BytesReference source = BytesReference.bytes( - XContentFactory.jsonBuilder() - .startObject() - .startObject("my-vector") - .field(Integer.toString(indexedDims[0]), indexedValues[0]) - .field(Integer.toString(indexedDims[1]), indexedValues[1]) - .field(Integer.toString(indexedDims[2]), indexedValues[2]) - .endObject() - .endObject() - ); - - DocumentParsingException indexException = expectThrows( - DocumentParsingException.class, - () -> mapper.parse(new SourceToParse("id", source, XContentType.JSON)) - ); - assertThat(indexException.getCause().getMessage(), containsString(SparseVectorFieldMapper.ERROR_MESSAGE_7X)); - } - public void testSparseVectorUnsupportedIndex() throws Exception { IndexVersion version = IndexVersionUtils.randomVersionBetween( random(), diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoderTests.java index 80b08f907be8d..9e581aa7711ef 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/VectorEncoderDecoderTests.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.index.IndexVersionUtils; @@ -27,7 +28,7 @@ public void testVectorDecodingWithOffset() { for (IndexVersion version : List.of( IndexVersionUtils.randomVersionBetween( random(), - DenseVectorFieldMapper.MAGNITUDE_STORED_INDEX_VERSION, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersionUtils.getPreviousVersion(DenseVectorFieldMapper.LITTLE_ENDIAN_FLOAT_STORED_INDEX_VERSION) ), DenseVectorFieldMapper.LITTLE_ENDIAN_FLOAT_STORED_INDEX_VERSION diff --git a/server/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java b/server/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java index a003436fc0523..f45b349d0770b 100644 --- a/server/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java @@ -73,7 +73,7 @@ public float score(float freq, long norm) { }; IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> SimilarityService.validateSimilarity(IndexVersions.V_7_0_0, negativeScoresSim) + () -> SimilarityService.validateSimilarity(IndexVersions.MINIMUM_COMPATIBLE, negativeScoresSim) ); assertThat(e.getMessage(), Matchers.containsString("Similarities should not return negative scores")); @@ -98,7 +98,7 @@ public float score(float freq, long norm) { }; e = expectThrows( IllegalArgumentException.class, - () -> SimilarityService.validateSimilarity(IndexVersions.V_7_0_0, decreasingScoresWithFreqSim) + () -> SimilarityService.validateSimilarity(IndexVersions.MINIMUM_COMPATIBLE, decreasingScoresWithFreqSim) ); assertThat(e.getMessage(), Matchers.containsString("Similarity scores should not decrease when term frequency increases")); @@ -123,7 +123,7 @@ public float score(float freq, long norm) { }; e = expectThrows( IllegalArgumentException.class, - () -> SimilarityService.validateSimilarity(IndexVersions.V_7_0_0, increasingScoresWithNormSim) + () -> SimilarityService.validateSimilarity(IndexVersions.MINIMUM_COMPATIBLE, 
increasingScoresWithNormSim) ); assertThat(e.getMessage(), Matchers.containsString("Similarity scores should not increase when norm increases")); } diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java index 628ff4b99b133..686acc367ade5 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.indices; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; @@ -96,6 +97,8 @@ public Map getMetadataMappers() { DataStreamTimestampFieldMapper.NAME, FieldNamesFieldMapper.NAME }; + @UpdateForV9 + @AwaitsFix(bugUrl = "test is referencing 7.x index versions so needs to be updated for 9.0 bump") public void testBuiltinMappers() { IndicesModule module = new IndicesModule(Collections.emptyList()); { diff --git a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java index c4d6cb6be502d..ff65464085c03 100644 --- a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java +++ b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.index.IndexService.IndexCreationContext; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.Analysis; import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.CharFilterFactory; @@ -187,34 +186,6 @@ public void testUnderscoreInAnalyzerName() throws IOException { } } - public void testStandardFilterBWC() throws IOException { - // standard tokenfilter should have been removed entirely in the 7x line. 
However, a - // cacheing bug meant that it was still possible to create indexes using a standard - // filter until 7.6 - { - IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_6_0, IndexVersion.current()); - final Settings settings = Settings.builder() - .put("index.analysis.analyzer.my_standard.tokenizer", "standard") - .put("index.analysis.analyzer.my_standard.filter", "standard") - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put(IndexMetadata.SETTING_VERSION_CREATED, version) - .build(); - IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> getIndexAnalyzers(settings)); - assertThat(exc.getMessage(), equalTo("The [standard] token filter has been removed.")); - } - { - IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2); - final Settings settings = Settings.builder() - .put("index.analysis.analyzer.my_standard.tokenizer", "standard") - .put("index.analysis.analyzer.my_standard.filter", "standard") - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put(IndexMetadata.SETTING_VERSION_CREATED, version) - .build(); - getIndexAnalyzers(settings); - assertWarnings("The [standard] token filter is deprecated and will be removed in a future version."); - } - } - /** * Tests that plugins can register pre-configured char filters that vary in behavior based on Elasticsearch version, Lucene version, * and that do not vary based on version at all. diff --git a/server/src/test/java/org/elasticsearch/rest/RestCompatibleVersionHelperTests.java b/server/src/test/java/org/elasticsearch/rest/RestCompatibleVersionHelperTests.java index acefa2958ea0a..796a16c00da58 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestCompatibleVersionHelperTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestCompatibleVersionHelperTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.ParsedMediaType; import org.hamcrest.CustomTypeSafeMatcher; @@ -163,6 +164,8 @@ public void testAcceptAndContentTypeCombinations() { assertThat(requestWith(acceptHeader(null), contentTypeHeader("application/json"), bodyPresent()), not(isCompatible())); } + @UpdateForV9 + @AwaitsFix(bugUrl = "this can be re-enabled once our rest api version is bumped to V_9") public void testObsoleteVersion() { ElasticsearchStatusException e = expectThrows( ElasticsearchStatusException.class, diff --git a/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java b/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java index 8bd53047b2dc7..78e1d8f8440a9 100644 --- a/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java @@ -45,11 +45,15 @@ public void testFloatVectorClassBindings() throws IOException { List fields = List.of( new BinaryDenseVectorDocValuesField( - BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, ElementType.FLOAT, IndexVersions.V_7_4_0), + BinaryDenseVectorScriptDocValuesTests.wrap( + new float[][] { docVector }, + ElementType.FLOAT, + IndexVersions.MINIMUM_COMPATIBLE + ), "test", ElementType.FLOAT, dims, - IndexVersions.V_7_4_0 + IndexVersions.MINIMUM_COMPATIBLE ), new 
BinaryDenseVectorDocValuesField( BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, ElementType.FLOAT, IndexVersion.current()), @@ -238,11 +242,15 @@ public void testByteVsFloatSimilarity() throws IOException { List fields = List.of( new BinaryDenseVectorDocValuesField( - BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, ElementType.FLOAT, IndexVersions.V_7_4_0), + BinaryDenseVectorScriptDocValuesTests.wrap( + new float[][] { docVector }, + ElementType.FLOAT, + IndexVersions.MINIMUM_COMPATIBLE + ), "field0", ElementType.FLOAT, dims, - IndexVersions.V_7_4_0 + IndexVersions.MINIMUM_COMPATIBLE ), new BinaryDenseVectorDocValuesField( BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, ElementType.FLOAT, IndexVersion.current()), diff --git a/server/src/test/java/org/elasticsearch/script/field/vectors/DenseVectorTests.java b/server/src/test/java/org/elasticsearch/script/field/vectors/DenseVectorTests.java index 2be338efd7174..d06593d0349ba 100644 --- a/server/src/test/java/org/elasticsearch/script/field/vectors/DenseVectorTests.java +++ b/server/src/test/java/org/elasticsearch/script/field/vectors/DenseVectorTests.java @@ -68,7 +68,7 @@ public void testFloatVsListQueryVector() { assertEquals(knn.cosineSimilarity(arrayQV), knn.cosineSimilarity(listQV), 0.001f); assertEquals(knn.cosineSimilarity((Object) listQV), knn.cosineSimilarity((Object) arrayQV), 0.001f); - for (IndexVersion indexVersion : List.of(IndexVersions.V_7_4_0, IndexVersion.current())) { + for (IndexVersion indexVersion : List.of(IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current())) { BytesRef value = BinaryDenseVectorScriptDocValuesTests.mockEncodeDenseVector(docVector, ElementType.FLOAT, indexVersion); BinaryDenseVector bdv = new BinaryDenseVector(docVector, value, dims, indexVersion); diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index 86aaa66b85bd5..c922feeb9f660 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -507,22 +507,6 @@ public final void testMeta() throws IOException { ); } - public final void testDeprecatedBoostWarning() throws IOException { - try { - createMapperService(DEPRECATED_BOOST_INDEX_VERSION, fieldMapping(b -> { - minimalMapping(b, DEPRECATED_BOOST_INDEX_VERSION); - b.field("boost", 2.0); - })); - String[] warnings = Strings.concatStringArrays( - getParseMinimalWarnings(DEPRECATED_BOOST_INDEX_VERSION), - new String[] { "Parameter [boost] on field [field] is deprecated and has no effect" } - ); - assertWarnings(warnings); - } catch (MapperParsingException e) { - assertThat(e.getMessage(), anyOf(containsString("Unknown parameter [boost]"), containsString("[boost : 2.0]"))); - } - } - public void testBoostNotAllowed() throws IOException { MapperParsingException e = expectThrows( MapperParsingException.class, diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java index 1b00ba3e9fd09..dd3c59e8fc365 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java @@ -10,6 +10,7 @@ import 
org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService.MergeReason; @@ -142,10 +143,13 @@ public final void testFixedMetaFieldsAreNotConfigurable() throws IOException { assertEquals("Failed to parse mapping: " + fieldName() + " is not configurable", exception.getMessage()); } + @UpdateForV9 + // This previously tested index versions between 7.0.0 and 8.6.0; the lower bound has been raised to 8.0.0 (MINIMUM_COMPATIBLE). + // Verify this is the correct action. public void testTypeAndFriendsAreAcceptedBefore_8_6_0() throws IOException { assumeTrue("Metadata field " + fieldName() + " isn't configurable", isConfigurable()); IndexVersion previousVersion = IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_6_0); - IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, previousVersion); + IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.MINIMUM_COMPATIBLE, previousVersion); assumeTrue("Metadata field " + fieldName() + " is not supported on version " + version, isSupportedOn(version)); MapperService mapperService = createMapperService(version, mapping(b -> {})); // these parameters were previously silently ignored, they will still be ignored in existing indices diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index ad738d8985e03..728e1ca73ded0 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.plugins.Plugin; @@ -364,12 +365,14 @@ protected static Settings.Builder indexSettingsNoReplicas(int shards) { /** * Randomly write an empty snapshot of an older version to an empty repository to simulate an older repository metadata format. */ + @UpdateForV9 + // This used to pick an index version from 7.0.0 to 8.9.0.
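The recurring fix in these two hunks is the same: derive the lower bound of a random version range from IndexVersions.MINIMUM_COMPATIBLE, which tracks the oldest index version the current build can read, rather than from a hard-coded V_7_0_0 constant that disappears at the 9.0 bump. A small sketch of the pattern, assuming only the helpers that already appear in the hunks; the class and method names are illustrative:

    import org.elasticsearch.index.IndexVersion;
    import org.elasticsearch.index.IndexVersions;
    import org.elasticsearch.test.ESTestCase;
    import org.elasticsearch.test.index.IndexVersionUtils;

    public class RandomCompatibleVersionSketchTests extends ESTestCase {
        public void testLowerBoundTracksMinimumCompatible() {
            // Pick a random created-version between the oldest index version
            // this build can still read and the release just before 8.6.0.
            IndexVersion version = IndexVersionUtils.randomVersionBetween(
                random(),
                IndexVersions.MINIMUM_COMPATIBLE,
                IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_6_0)
            );
            assertTrue(version.onOrAfter(IndexVersions.MINIMUM_COMPATIBLE));
            assertTrue(version.before(IndexVersions.V_8_6_0));
        }
    }
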
The minimum now is 8.0.0 but it's not clear what the upper range should be protected void maybeInitWithOldSnapshotVersion(String repoName, Path repoPath) throws Exception { if (randomBoolean() && randomBoolean()) { initWithSnapshotVersion( repoName, repoPath, - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_8_9_0) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersions.V_8_9_0) ); } } diff --git a/x-pack/plugin/autoscaling/qa/rest/build.gradle b/x-pack/plugin/autoscaling/qa/rest/build.gradle index 19254880a7089..c79644ee31225 100644 --- a/x-pack/plugin/autoscaling/qa/rest/build.gradle +++ b/x-pack/plugin/autoscaling/qa/rest/build.gradle @@ -13,12 +13,6 @@ restResources { } } -tasks.named("yamlRestTestV7CompatTest").configure { - systemProperty 'tests.rest.blacklist', [ - "autoscaling/get_autoscaling_capacity/Test get fixed autoscaling capacity", - ].join(',') -} - testClusters.configureEach { testDistribution = 'DEFAULT' setting 'xpack.security.enabled', 'true' diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index ff45cc9430633..4fdc4c3af4190 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -22,8 +22,7 @@ dependencies { // let the yamlRestTests see the classpath of test GradleUtils.extendSourceSet(project, "test", "yamlRestTest", tasks.named("yamlRestTest")) -int compatVersion = VersionProperties.getElasticsearchVersion().getMajor() - 1; -GradleUtils.extendSourceSet(project, "test", "yamlRestTestV${compatVersion}Compat") +GradleUtils.extendSourceSet(project, "test", "yamlRestCompatTest") restResources { restApi { @@ -59,120 +58,6 @@ tasks.named("yamlRestTest").configure { systemProperty 'tests.rest.blacklist', restTestBlacklist.join(',') } -tasks.named("yamlRestTestV7CompatTest").configure { - systemProperty 'tests.rest.blacklist', [ - 'unsigned_long/50_script_values/Scripted sort values', - 'unsigned_long/50_script_values/script_score query', - 'unsigned_long/50_script_values/Script query', - 'aggregate-metrics/90_tsdb_mappings/aggregate_double_metric with time series mappings', - 'aggregate-metrics/90_tsdb_mappings/aggregate_double_metric with wrong time series mappings', - 'analytics/histogram/histogram with wrong time series mappings', - 'analytics/histogram/histogram with time series mappings', - 'analytics/boxplot/Basic Search', - 'analytics/boxplot/Search With Runtime Field', - 'analytics/boxplot/Search With Missing', - 'analytics/moving_percentile/Basic Search TDigest', - 'ml/evaluate_data_frame/Test outlier_detection auc_roc', - 'ml/evaluate_data_frame/Test outlier_detection auc_roc given actual_field is int', - 'ml/evaluate_data_frame/Test outlier_detection auc_roc include curve', - 'ml/evaluate_data_frame/Test classification auc_roc', - 'ml/evaluate_data_frame/Test classification auc_roc with default top_classes_field', - ].join(',') -} - -tasks.named("yamlRestTestV7CompatTransform").configure { task -> - task.skipTest( - "vectors/10_dense_vector_basic/Deprecated function signature", - "to support it, it would require to almost revert back the #48725 and complicate the code" - ) - task.skipTest( - "vectors/20_dense_vector_special_cases/Indexing of Dense vectors should error when dims don't match defined in the mapping", - "Error message has changed" - ) - task.skipTest("vectors/30_sparse_vector_basic/Cosine Similarity", "not supported for compatibility") - task.skipTest("vectors/30_sparse_vector_basic/Deprecated function signature", "not supported 
for compatibility") - task.skipTest("vectors/30_sparse_vector_basic/Dot Product", "not supported for compatibility") - task.skipTest("vectors/35_sparse_vector_l1l2/L1 norm", "not supported for compatibility") - task.skipTest("vectors/35_sparse_vector_l1l2/L2 norm", "not supported for compatibility") - task.skipTest("vectors/40_sparse_vector_special_cases/Dimensions can be sorted differently", "not supported for compatibility") - task.skipTest("vectors/40_sparse_vector_special_cases/Documents missing a vector field", "not supported for compatibility") - task.skipTest( - "vectors/40_sparse_vector_special_cases/Query vector has different dimensions from documents' vectors", - "not supported for compatibility" - ) - task.skipTest("vectors/40_sparse_vector_special_cases/Sparse vectors should error with dense vector functions", "not supported for compatibility") - task.skipTest("vectors/40_sparse_vector_special_cases/Vectors of different dimensions and data types", "not supported for compatibility") - task.skipTest("vectors/50_vector_stats/Usage stats on vector fields", "not supported for compatibility") - task.skipTest( - "roles/30_prohibited_role_query/Test use prohibited query inside role query", - "put role request with a term lookup (deprecated) and type. Requires validation in REST layer" - ) - task.skipTest("ml/jobs_crud/Test update job", "Behaviour change #89824 - added limit filter to categorization analyzer") - task.skipTest("ml/jobs_crud/Test create job with delimited format", "removing undocumented functionality") - task.skipTest("ml/jobs_crud/Test cannot create job with model snapshot id set", "Exception type has changed.") - task.skipTest("ml/validate/Test job config is invalid because model snapshot id set", "Exception type has changed.") - task.skipTest("ml/datafeeds_crud/Test update datafeed to point to missing job", "behaviour change #44752 - not allowing to update datafeed job_id") - task.skipTest( - "ml/datafeeds_crud/Test update datafeed to point to different job", - "behaviour change #44752 - not allowing to update datafeed job_id" - ) - task.skipTest( - "ml/datafeeds_crud/Test update datafeed to point to job already attached to another datafeed", - "behaviour change #44752 - not allowing to update datafeed job_id" - ) - task.skipTest( - "ml/trained_model_cat_apis/Test cat trained models", - "A type field was added to cat.ml_trained_models #73660, this is a backwards compatible change. Still this is a cat api, and we don't support them with rest api compatibility. 
(the test would be very hard to transform too)" - ) - task.skipTest( - "ml/categorization_agg/Test categorization agg simple", - "categorize_text was changed in 8.3, but experimental prior to the change" - ) - task.skipTest( - "ml/categorization_agg/Test categorization aggregation against unsupported field", - "categorize_text was changed in 8.3, but experimental prior to the change" - ) - task.skipTest( - "ml/categorization_agg/Test categorization aggregation with poor settings", - "categorize_text was changed in 8.3, but experimental prior to the change" - ) - task.skipTest("indices.freeze/30_usage/Usage stats on frozen indices", "#70192 -- the freeze index API is removed from 8.0") - task.skipTest("indices.freeze/20_stats/Translog stats on frozen indices", "#70192 -- the freeze index API is removed from 8.0") - task.skipTest("indices.freeze/10_basic/Basic", "#70192 -- the freeze index API is removed from 8.0") - task.skipTest("indices.freeze/10_basic/Test index options", "#70192 -- the freeze index API is removed from 8.0") - task.skipTest("sql/sql/Paging through results", "scrolling through search hit queries no longer produces empty last page in 8.2") - task.skipTest("sql/translate/Translate SQL", "query folding changed in v 8.5, added track_total_hits: -1") - task.skipTest("service_accounts/10_basic/Test get service accounts", "new service accounts are added") - task.skipTest("spatial/70_script_doc_values/diagonal length", "precision changed in 8.4.0") - task.skipTest("spatial/70_script_doc_values/geoshape value", "error message changed in 8.9.0") - task.skipTest("security/authz/14_cat_indices/Test empty request while single authorized index", "not supported for compatibility") - task.skipTestsByFilePattern("**/rollup/**", "The rollup yaml tests in the 7.x branch don't know how to fake a cluster with rollup usage") - - task.replaceValueInMatch("_type", "_doc") - task.addAllowedWarningRegex("\\[types removal\\].*") - task.addAllowedWarningRegexForTest("Including \\[accept_enterprise\\] in get license.*", "Installing enterprise license") - task.addAllowedWarningRegex("bucket_span .* is not an integral .* of the number of seconds in 1d.* This is now deprecated.*") - - task.replaceValueTextByKeyValue( - "catch", - 'bad_request', - '/It is no longer possible to freeze indices, but existing frozen indices can still be unfrozen/', - "Cannot freeze write index for data stream" - ) - - task.replaceValueInMatch( - "error.reason", - "action [cluster:admin/xpack/security/api_key/invalidate] is unauthorized for user [api_key_user_1] with effective roles [user_role], this action is granted by the cluster privileges [manage_api_key,manage_security,all]", - "Test invalidate api key by realm name" - ) - - task.replaceValueInMatch( - "error.reason", - "action [cluster:admin/xpack/security/api_key/invalidate] is unauthorized for user [api_key_user_1] with effective roles [user_role], this action is granted by the cluster privileges [manage_api_key,manage_security,all]", - "Test invalidate api key by username" - ) -} - tasks.register('enforceApiSpecsConvention').configure { def mainApiSpecs = fileTree('src/test/resources/rest-api-spec/api') doLast { diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java index 467ef3c68f648..008792966a4b2 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java 
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; @@ -1812,6 +1813,8 @@ void updateAutoFollowMetadata(Function updateFunctio assertThat(counter.get(), equalTo(states.length)); } + @UpdateForV9 + @AwaitsFix(bugUrl = "ability to disable soft deletes was removed in 8.0 indexes so we can probably remove this test") public void testAutoFollowerSoftDeletesDisabled() { Client client = mock(Client.class); when(client.getRemoteClusterClient(anyString(), any(), any())).thenReturn(new RedirectToLocalClusterRemoteClusterClient(client)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SetSingleNodeAllocateStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SetSingleNodeAllocateStepTests.java index 1fb7b7c36827e..f988a6fd5769c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SetSingleNodeAllocateStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SetSingleNodeAllocateStepTests.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.ilm; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -388,11 +387,7 @@ public void testPerformActionAttrsNoShard() { public void testPerformActionSomeShardsOnlyOnNewNodes() throws Exception { VersionInformation oldVersion = new VersionInformation( - VersionUtils.randomVersionBetween( - random(), - Version.fromId(Version.CURRENT.major * 1_000_000 + 99), - VersionUtils.getPreviousVersion() - ), + VersionUtils.randomCompatibleVersion(random(), VersionUtils.getPreviousVersion()), IndexVersions.MINIMUM_COMPATIBLE, IndexVersionUtils.randomCompatibleVersion(random()) ); @@ -457,11 +452,7 @@ public void testPerformActionSomeShardsOnlyOnNewNodes() throws Exception { public void testPerformActionSomeShardsOnlyOnNewNodesButNewNodesInvalidAttrs() { VersionInformation oldVersion = new VersionInformation( - VersionUtils.randomVersionBetween( - random(), - Version.fromId(Version.CURRENT.major * 1_000_000 + 99), - VersionUtils.getPreviousVersion() - ), + VersionUtils.randomCompatibleVersion(random(), VersionUtils.getPreviousVersion()), IndexVersions.MINIMUM_COMPATIBLE, IndexVersionUtils.randomCompatibleVersion(random()) ); @@ -534,11 +525,7 @@ public void testPerformActionSomeShardsOnlyOnNewNodesButNewNodesInvalidAttrs() { public void testPerformActionNewShardsExistButWithInvalidAttributes() throws Exception { VersionInformation oldVersion = new VersionInformation( - VersionUtils.randomVersionBetween( - random(), - Version.fromId(Version.CURRENT.major * 1_000_000 + 99), - VersionUtils.getPreviousVersion() - ), + VersionUtils.randomCompatibleVersion(random(), VersionUtils.getPreviousVersion()), IndexVersions.MINIMUM_COMPATIBLE, IndexVersionUtils.randomCompatibleVersion(random()) ); diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java 
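The SetSingleNodeAllocateStepTests hunks above drop the Version.fromId(Version.CURRENT.major * 1_000_000 + 99) arithmetic, which produces a bogus lower bound the moment the major version is bumped, in favour of VersionUtils.randomCompatibleVersion(...). A hedged sketch of building an "old node" VersionInformation that way; the import locations are assumed from the wider codebase and the class name is illustrative:

    import org.elasticsearch.cluster.node.VersionInformation;
    import org.elasticsearch.index.IndexVersions;
    import org.elasticsearch.test.ESTestCase;
    import org.elasticsearch.test.VersionUtils;
    import org.elasticsearch.test.index.IndexVersionUtils;

    public class OldNodeVersionSketchTests extends ESTestCase {
        public void testOldNodeVersionStaysCompatible() {
            // A node version wire-compatible with the previous release, instead
            // of one computed from major-version arithmetic that goes stale at
            // the next major bump.
            VersionInformation oldVersion = new VersionInformation(
                VersionUtils.randomCompatibleVersion(random(), VersionUtils.getPreviousVersion()),
                IndexVersions.MINIMUM_COMPATIBLE,
                IndexVersionUtils.randomCompatibleVersion(random())
            );
            assertNotNull(oldVersion);
        }
    }
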
b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java index 62f89f650dec2..18872d00d54a0 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java @@ -160,7 +160,7 @@ public void testCamelCaseDeprecation() throws IOException { + "} }"; IndexMetadata simpleIndex = IndexMetadata.builder(randomAlphaOfLengthBetween(5, 10)) - .settings(settings(IndexVersions.V_7_0_0)) + .settings(settings(IndexVersions.MINIMUM_COMPATIBLE)) .numberOfShards(1) .numberOfReplicas(1) .putMapping(simpleMapping) diff --git a/x-pack/plugin/downsample/qa/rest/build.gradle b/x-pack/plugin/downsample/qa/rest/build.gradle index 603d69d695cac..ba5ac7b0c7317 100644 --- a/x-pack/plugin/downsample/qa/rest/build.gradle +++ b/x-pack/plugin/downsample/qa/rest/build.gradle @@ -29,14 +29,10 @@ artifacts { tasks.named('yamlRestTest') { usesDefaultDistribution() } -tasks.named('yamlRestTestV7CompatTest') { +tasks.named('yamlRestCompatTest') { usesDefaultDistribution() } if (BuildParams.inFipsJvm){ // This test cluster is using a BASIC license and FIPS 140 mode is not supported in BASIC tasks.named("yamlRestTest").configure{enabled = false } } - -tasks.named("yamlRestTestV7CompatTransform").configure { task -> - task.skipTest("rollup/10_basic/Rollup index", "Downsample for TSDB changed the configuration") -} diff --git a/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle b/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle index b0f1e8bd026b0..eb14d6fe46958 100644 --- a/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle +++ b/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle @@ -6,8 +6,6 @@ * Side Public License, v 1. 
*/ -import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask @@ -21,9 +19,6 @@ dependencies { javaRestTestImplementation(testArtifact(project(":qa:full-cluster-restart"), "javaRestTest")) } -assert Version.fromString(VersionProperties.getVersions().get("elasticsearch")).getMajor() == 8: - "If we are targeting a branch other than 8, we should enable migration tests" - BuildParams.bwcVersions.withWireCompatible(v -> v.after("8.8.0")) { bwcVersion, baseName -> tasks.register(bwcTaskName(bwcVersion), StandaloneRestIntegTestTask) { usesBwcDistribution(bwcVersion) diff --git a/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java b/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java index e01b8de941851..5e3fd5eb6d112 100644 --- a/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java +++ b/x-pack/plugin/ent-search/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/xpack/application/FullClusterRestartIT.java @@ -12,6 +12,7 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.ObjectPath; @@ -25,6 +26,7 @@ import static org.elasticsearch.Version.V_8_12_0; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +@UpdateForV9 // Investigate what needs to be added in terms of 9.0 migration testing public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCase { // DSL was introduced with version 8.12.0 of ES. 
private static final Version DSL_DEFAULT_RETENTION_VERSION = V_8_12_0; diff --git a/x-pack/plugin/eql/qa/correctness/src/javaRestTest/java/org/elasticsearch/xpack/eql/EsEQLCorrectnessIT.java b/x-pack/plugin/eql/qa/correctness/src/javaRestTest/java/org/elasticsearch/xpack/eql/EsEQLCorrectnessIT.java index 1d5ed1ffdcbab..478e2f93db25f 100644 --- a/x-pack/plugin/eql/qa/correctness/src/javaRestTest/java/org/elasticsearch/xpack/eql/EsEQLCorrectnessIT.java +++ b/x-pack/plugin/eql/qa/correctness/src/javaRestTest/java/org/elasticsearch/xpack/eql/EsEQLCorrectnessIT.java @@ -13,6 +13,7 @@ import org.apache.http.HttpHost; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.apache.lucene.tests.util.TimeUnits; import org.elasticsearch.client.HttpAsyncResponseConsumerFactory; import org.elasticsearch.client.Request; @@ -45,6 +46,7 @@ @TimeoutSuite(millis = 30 * TimeUnits.MINUTE) @TestLogging(value = "org.elasticsearch.xpack.eql.EsEQLCorrectnessIT:INFO", reason = "Log query execution time") +@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/112572") public class EsEQLCorrectnessIT extends ESRestTestCase { private static final String PARAM_FORMATTING = "%1$s"; diff --git a/x-pack/plugin/eql/qa/rest/build.gradle b/x-pack/plugin/eql/qa/rest/build.gradle index d035005758a54..d5b0cc42091f3 100644 --- a/x-pack/plugin/eql/qa/rest/build.gradle +++ b/x-pack/plugin/eql/qa/rest/build.gradle @@ -26,18 +26,10 @@ tasks.named('javaRestTest') { tasks.named('yamlRestTest') { usesDefaultDistribution() } -tasks.named('yamlRestTestV7CompatTest') { +tasks.named('yamlRestCompatTest') { usesDefaultDistribution() } -tasks.named("yamlRestTestV7CompatTransform").configure {task -> - task.skipTest("eql/10_basic/Execute EQL events query with wildcard (*) fields filtering.", "Change of locale with Java 23 makes these tests non deterministic") - task.skipTest("eql/10_basic/Execute EQL sequence with fields filtering.", "Change of locale with Java 23 makes these tests non deterministic") - task.skipTest("eql/10_basic/Execute EQL sequence with custom format for timestamp field.", "Change of locale with Java 23 makes these tests non deterministic") - task.skipTest("eql/10_basic/Execute EQL events query with fields filtering", "Change of locale with Java 23 makes these tests non deterministic") - task.skipTest("eql/10_basic/Execute EQL sequence with wildcard (*) fields filtering.", "Change of locale with Java 23 makes these tests non deterministic") -} - if (BuildParams.inFipsJvm){ // This test cluster is using a BASIC license and FIPS 140 mode is not supported in BASIC tasks.named("javaRestTest").configure{enabled = false } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 02a2cac0513c0..d59dda273ed6e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -1314,7 +1314,7 @@ COUNT(c):long | a:integer 0 | 1 ; -countDistinctNull#[skip:-8.99.99,reason:not yet fixed] +countDistinctNull#[skip:-9.99.99,reason:not yet fixed] ROW a = 1, c = null | STATS COUNT_DISTINCT(c) BY a; diff --git a/x-pack/plugin/mapper-unsigned-long/build.gradle b/x-pack/plugin/mapper-unsigned-long/build.gradle index 9c04584cdf130..e011723da6230 100644 --- a/x-pack/plugin/mapper-unsigned-long/build.gradle +++ 
b/x-pack/plugin/mapper-unsigned-long/build.gradle @@ -37,15 +37,6 @@ restResources { } } -tasks.named("yamlRestTestV7CompatTest").configure { - systemProperty 'tests.rest.blacklist', [ - '50_script_values/Scripted fields values return Long', - '50_script_values/Scripted sort values', - '50_script_values/Script query', - '50_script_values/script_score query' - ].join(',') -} - if (BuildParams.isSnapshotBuild() == false) { tasks.named("test").configure { systemProperty 'es.index_mode_feature_flag_registered', 'true' diff --git a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle index b19fa4ab5f185..bc22552d0d734 100644 --- a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle @@ -12,7 +12,7 @@ dependencies { testImplementation project(':x-pack:qa') } -Version ccsCompatVersion = new Version(VersionProperties.getElasticsearchVersion().getMajor(), VersionProperties.getElasticsearchVersion().getMinor() - 1, 0) +Version ccsCompatVersion = BuildParams.bwcVersions.minimumWireCompatibleVersion restResources { restApi { diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java index bf1538b4e5dd8..42b5ba83a0828 100644 --- a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.lucene.bwc.codecs; import org.elasticsearch.Version; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.test.ESTestCase; public class OldCodecsAvailableTests extends ESTestCase { @@ -16,6 +17,8 @@ public class OldCodecsAvailableTests extends ESTestCase { * Reminder to add Lucene BWC codecs under {@link org.elasticsearch.xpack.lucene.bwc.codecs} whenever Elasticsearch is upgraded * to the next major Lucene version. 
*/ + @UpdateForV9 + @AwaitsFix(bugUrl = "muted until we add bwc codecs as part of lucene 10 upgrade") public void testLuceneBWCCodecsAvailable() { assertEquals("Add Lucene BWC codecs for Elasticsearch version 7", 8, Version.CURRENT.major); } diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java index 594d356becf87..bd090b528bb97 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java @@ -7,11 +7,13 @@ package org.elasticsearch.xpack.searchablesnapshots.upgrade; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -27,6 +29,8 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; +@UpdateForV9 +@LuceneTestCase.AwaitsFix(bugUrl = "this is testing a number of pre 8.0 upgrade scenarios so needs updating or removal for 9.0") public class SearchableSnapshotIndexMetadataUpgraderTests extends ESTestCase { public void testNoUpgradeNeeded() { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityImplicitBehaviorBootstrapCheckTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityImplicitBehaviorBootstrapCheckTests.java index 6777c38b809e0..01715cde33cf0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityImplicitBehaviorBootstrapCheckTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityImplicitBehaviorBootstrapCheckTests.java @@ -10,9 +10,9 @@ import org.elasticsearch.Version; import org.elasticsearch.bootstrap.BootstrapCheck; import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.env.BuildVersion; import org.elasticsearch.env.NodeMetadata; import org.elasticsearch.index.IndexVersion; @@ -25,92 +25,13 @@ import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.core.XPackSettings; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class SecurityImplicitBehaviorBootstrapCheckTests extends AbstractBootstrapCheckTestCase { - public void testFailureUpgradeFrom7xWithImplicitSecuritySettings() throws Exception { - final BuildVersion previousVersion = toBuildVersion( - randomValueOtherThan( - Version.V_8_0_0, - () -> VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(), Version.V_8_0_0) - )
- ); - NodeMetadata nodeMetadata = new NodeMetadata(randomAlphaOfLength(10), previousVersion, IndexVersion.current()); - nodeMetadata = nodeMetadata.upgradeToCurrentVersion(); - ClusterStateLicenseService licenseService = mock(ClusterStateLicenseService.class); - Metadata metadata = createLicensesMetadata( - TrialLicenseVersion.fromXContent(previousVersion.toString()), - randomFrom("basic", "trial") - ); - License license = mock(License.class); - when(licenseService.getLicense(metadata)).thenReturn(license); - when(license.operationMode()).thenReturn(randomFrom(License.OperationMode.BASIC, License.OperationMode.TRIAL)); - BootstrapCheck.BootstrapCheckResult result = new SecurityImplicitBehaviorBootstrapCheck(nodeMetadata, licenseService).check( - createTestContext(Settings.EMPTY, metadata) - ); - assertThat(result.isFailure(), is(true)); - assertThat( - result.getMessage(), - equalTo( - "The default value for [" - + XPackSettings.SECURITY_ENABLED.getKey() - + "] has changed in the current version. " - + " Security features were implicitly disabled for this node but they would now be enabled, possibly" - + " preventing access to the node. " - + "See " - + ReferenceDocs.BOOTSTRAP_CHECK_SECURITY_MINIMAL_SETUP - + " to configure security, or explicitly disable security by " - + "setting [xpack.security.enabled] to \"false\" in elasticsearch.yml before restarting the node." - ) - ); - } - - public void testUpgradeFrom7xWithImplicitSecuritySettingsOnGoldPlus() throws Exception { - final BuildVersion previousVersion = toBuildVersion( - randomValueOtherThan( - Version.V_8_0_0, - () -> VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(), Version.V_8_0_0) - ) - ); - NodeMetadata nodeMetadata = new NodeMetadata(randomAlphaOfLength(10), previousVersion, IndexVersion.current()); - nodeMetadata = nodeMetadata.upgradeToCurrentVersion(); - ClusterStateLicenseService licenseService = mock(ClusterStateLicenseService.class); - Metadata metadata = createLicensesMetadata( - TrialLicenseVersion.fromXContent(previousVersion.toString()), - randomFrom("gold", "platinum") - ); - License license = mock(License.class); - when(licenseService.getLicense(metadata)).thenReturn(license); - when(license.operationMode()).thenReturn(randomFrom(License.OperationMode.GOLD, License.OperationMode.PLATINUM)); - BootstrapCheck.BootstrapCheckResult result = new SecurityImplicitBehaviorBootstrapCheck(nodeMetadata, licenseService).check( - createTestContext(Settings.EMPTY, metadata) - ); - assertThat(result.isSuccess(), is(true)); - } - - public void testUpgradeFrom7xWithExplicitSecuritySettings() throws Exception { - final BuildVersion previousVersion = toBuildVersion( - randomValueOtherThan( - Version.V_8_0_0, - () -> VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(), Version.V_8_0_0) - ) - ); - NodeMetadata nodeMetadata = new NodeMetadata(randomAlphaOfLength(10), previousVersion, IndexVersion.current()); - nodeMetadata = nodeMetadata.upgradeToCurrentVersion(); - ClusterStateLicenseService licenseService = mock(ClusterStateLicenseService.class); - BootstrapCheck.BootstrapCheckResult result = new SecurityImplicitBehaviorBootstrapCheck(nodeMetadata, licenseService).check( - createTestContext( - Settings.builder().put(XPackSettings.SECURITY_ENABLED.getKey(), true).build(), - createLicensesMetadata(TrialLicenseVersion.fromXContent(previousVersion.toString()), randomFrom("basic", "trial")) - ) - ); - assertThat(result.isSuccess(), is(true)); - } - + 
@UpdateForV9 + @AwaitsFix(bugUrl = "requires updates for version 9.0 bump") public void testUpgradeFrom8xWithImplicitSecuritySettings() throws Exception { final BuildVersion previousVersion = toBuildVersion(VersionUtils.randomVersionBetween(random(), Version.V_8_0_0, null)); NodeMetadata nodeMetadata = new NodeMetadata(randomAlphaOfLength(10), previousVersion, IndexVersion.current()); @@ -125,6 +46,8 @@ public void testUpgradeFrom8xWithImplicitSecuritySettings() throws Exception { assertThat(result.isSuccess(), is(true)); } + @UpdateForV9 + @AwaitsFix(bugUrl = "requires updates for version 9.0 bump") public void testUpgradeFrom8xWithExplicitSecuritySettings() throws Exception { final BuildVersion previousVersion = toBuildVersion(VersionUtils.randomVersionBetween(random(), Version.V_8_0_0, null)); NodeMetadata nodeMetadata = new NodeMetadata(randomAlphaOfLength(10), previousVersion, IndexVersion.current()); diff --git a/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java b/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java index 851d5f8f02b2a..449edd8f40ee2 100644 --- a/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java +++ b/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -202,6 +203,8 @@ public void fetchLatestSnapshotsForShard(ShardId shardId, ActionListener { boolean shareFilesWithSource = randomBoolean(); @@ -387,6 +390,8 @@ public void fetchLatestSnapshotsForShard(ShardId shardId, ActionListener { Store.MetadataSnapshot targetMetadataSnapshot = generateRandomTargetState(store); diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoBoundingBoxQueryLegacyGeoShapeWithDocValuesIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoBoundingBoxQueryLegacyGeoShapeWithDocValuesIT.java index 296af0c983279..2c92d5387d871 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoBoundingBoxQueryLegacyGeoShapeWithDocValuesIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoBoundingBoxQueryLegacyGeoShapeWithDocValuesIT.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.spatial.search; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.plugins.Plugin; @@ -20,6 +22,8 @@ import java.util.Collection; import java.util.Collections; +@UpdateForV9 +@LuceneTestCase.AwaitsFix(bugUrl = "this is testing legacy functionality so can likely be removed in 9.0") public class GeoBoundingBoxQueryLegacyGeoShapeWithDocValuesIT extends GeoBoundingBoxQueryIntegTestCase { @Override diff --git 
a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/LegacyGeoShapeWithDocValuesIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/LegacyGeoShapeWithDocValuesIT.java index 3cf70b1d477b6..b48f5a8c17e98 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/LegacyGeoShapeWithDocValuesIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/LegacyGeoShapeWithDocValuesIT.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.spatial.search; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.geometry.Circle; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -27,6 +29,8 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.containsString; +@UpdateForV9 +@LuceneTestCase.AwaitsFix(bugUrl = "this is testing legacy functionality so can likely be removed in 9.0") public class LegacyGeoShapeWithDocValuesIT extends GeoShapeIntegTestCase { @Override diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java index 5999a3ff1e151..0e04cfe6757bf 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.Orientation; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.utils.GeometryValidator; import org.elasticsearch.geometry.utils.WellKnownBinary; @@ -91,16 +92,6 @@ public void testDefaultConfiguration() throws IOException { assertTrue(fieldType.hasDocValues()); } - public void testDefaultDocValueConfigurationOnPre7_8() throws IOException { - IndexVersion oldVersion = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_7_0); - DocumentMapper defaultMapper = createDocumentMapper(oldVersion, fieldMapping(this::minimalMapping)); - Mapper fieldMapper = defaultMapper.mappers().getMapper(FIELD_NAME); - assertThat(fieldMapper, instanceOf(fieldMapperClass())); - - GeoShapeWithDocValuesFieldMapper geoShapeFieldMapper = (GeoShapeWithDocValuesFieldMapper) fieldMapper; - assertFalse(geoShapeFieldMapper.fieldType().hasDocValues()); - } - /** * Test that orientation parameter correctly parses */ @@ -289,6 +280,8 @@ public void testInvalidCurrentVersion() { ); } + @UpdateForV9 + @AwaitsFix(bugUrl = "this is testing legacy functionality so can likely be removed in 9.0") public void testGeoShapeLegacyMerge() throws Exception { IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); MapperService m = createMapperService(version, fieldMapping(b -> b.field("type", getFieldName()))); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java 
b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java index 35ccfe8deb5fe..d030a2bbf81ad 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java @@ -114,7 +114,7 @@ public void testDefaultConfiguration() throws IOException { public void testDefaultDocValueConfigurationOnPre8_4() throws IOException { // TODO verify which version this test is actually valid for (when PR is actually merged) - IndexVersion oldVersion = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_8_3_0); + IndexVersion oldVersion = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersions.V_8_3_0); DocumentMapper defaultMapper = createDocumentMapper(oldVersion, fieldMapping(this::minimalMapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper(FIELD_NAME); assertThat(fieldMapper, instanceOf(fieldMapperClass())); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoShapeQueryBuilderGeoShapeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoShapeQueryBuilderGeoShapeTests.java index 593656411eb41..405ef5c480687 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoShapeQueryBuilderGeoShapeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoShapeQueryBuilderGeoShapeTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.ShapeType; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.GeoShapeQueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; @@ -88,25 +87,12 @@ protected GeoShapeQueryBuilder doCreateTestQueryBuilder(boolean indexedShape) { } if (ESTestCase.randomBoolean()) { SearchExecutionContext context = AbstractBuilderTestCase.createSearchExecutionContext(); - if (context.indexVersionCreated().onOrAfter(IndexVersions.V_7_5_0)) { // CONTAINS is only supported from version 7.5 - if (shapeType == ShapeType.LINESTRING || shapeType == ShapeType.MULTILINESTRING) { - builder.relation(ESTestCase.randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.CONTAINS)); - } else { - builder.relation( - ESTestCase.randomFrom( - ShapeRelation.DISJOINT, - ShapeRelation.INTERSECTS, - ShapeRelation.WITHIN, - ShapeRelation.CONTAINS - ) - ); - } + if (shapeType == ShapeType.LINESTRING || shapeType == ShapeType.MULTILINESTRING) { + builder.relation(ESTestCase.randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.CONTAINS)); } else { - if (shapeType == ShapeType.LINESTRING || shapeType == ShapeType.MULTILINESTRING) { - builder.relation(ESTestCase.randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS)); - } else { - builder.relation(ESTestCase.randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.WITHIN)); - } + builder.relation( + ESTestCase.randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.WITHIN, ShapeRelation.CONTAINS) + ); } } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java 
b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java index 053931a882e4c..de66d0b822c94 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java @@ -7,9 +7,11 @@ package org.elasticsearch.xpack.spatial.index.query; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoJson; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.MultiPoint; @@ -39,6 +41,8 @@ import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; +@UpdateForV9 +@LuceneTestCase.AwaitsFix(bugUrl = "this is testing legacy functionality so can likely be removed in 9.0") public class LegacyGeoShapeWithDocValuesQueryTests extends GeoShapeQueryTestCase { @SuppressWarnings("deprecation") diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderOverShapeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderOverShapeTests.java index aa5ae72df2b9e..72073a6eff550 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderOverShapeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderOverShapeTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.ShapeType; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.SearchExecutionContext; @@ -33,18 +32,10 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws @Override protected ShapeRelation getShapeRelation(ShapeType type) { SearchExecutionContext context = createSearchExecutionContext(); - if (context.indexVersionCreated().onOrAfter(IndexVersions.V_7_5_0)) { // CONTAINS is only supported from version 7.5 - if (type == ShapeType.LINESTRING || type == ShapeType.MULTILINESTRING) { - return randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.CONTAINS); - } else { - return randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.WITHIN, ShapeRelation.CONTAINS); - } + if (type == ShapeType.LINESTRING || type == ShapeType.MULTILINESTRING) { + return randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.CONTAINS); } else { - if (type == ShapeType.LINESTRING || type == ShapeType.MULTILINESTRING) { - return randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS); - } else { - return randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.WITHIN); - } + return randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.WITHIN, ShapeRelation.CONTAINS); } } diff --git a/x-pack/plugin/sql/qa/jdbc/build.gradle b/x-pack/plugin/sql/qa/jdbc/build.gradle index 42bf524dac17e..022306fe9b306 100644 --- a/x-pack/plugin/sql/qa/jdbc/build.gradle +++ b/x-pack/plugin/sql/qa/jdbc/build.gradle @@ -72,7 +72,7 @@ 
subprojects { // Configure compatibility testing tasks // Compatibility testing for JDBC driver started with version 7.9.0 - BuildParams.bwcVersions.allIndexCompatible.findAll({ it.onOrAfter(Version.fromString("7.9.0")) && it != VersionProperties.elasticsearchVersion }).each { bwcVersion -> + BuildParams.bwcVersions.indexCompatible.findAll({ it.onOrAfter(Version.fromString("7.9.0")) && it != VersionProperties.elasticsearchVersion }).each { bwcVersion -> def baseName = "v${bwcVersion}" def cluster = testClusters.register(baseName) diff --git a/x-pack/plugin/src/yamlRestTestV7Compat/resources/rest-api-spec.test.geo_shape/10_compat_geo_shape_with_types.yml b/x-pack/plugin/src/yamlRestTestV7Compat/resources/rest-api-spec.test.geo_shape/10_compat_geo_shape_with_types.yml deleted file mode 100644 index 2cf26f336ba47..0000000000000 --- a/x-pack/plugin/src/yamlRestTestV7Compat/resources/rest-api-spec.test.geo_shape/10_compat_geo_shape_with_types.yml +++ /dev/null @@ -1,68 +0,0 @@ ---- -setup: - - requires: - test_runner_features: - - "headers" - - "warnings" - - "allowed_warnings_regex" - ---- -"Test geo_shape with type": - - do: - headers: - Content-Type: "application/vnd.elasticsearch+json;compatible-with=7" - Accept: "application/vnd.elasticsearch+json;compatible-with=7" - allowed_warnings_regex: - - "\\[types removal\\].*" - indices.create: - index: shapes - include_type_name: true - body: - mappings: - _doc: - properties: - location: - type: geo_shape - - - - do: - headers: - Content-Type: "application/vnd.elasticsearch+json;compatible-with=7" - Accept: "application/vnd.elasticsearch+json;compatible-with=7" - allowed_warnings_regex: - - "\\[types removal\\].*" - index: - index: shapes - type: _doc - id: deu - body: - location: - type : "envelope" - coordinates: [[13.0, 53.0], [14.0, 52.0]] - - - do: - indices.refresh: {} - - - do: - headers: - Content-Type: "application/vnd.elasticsearch+json;compatible-with=7" - Accept: "application/vnd.elasticsearch+json;compatible-with=7" - warnings: - - "[types removal] Types are deprecated in [geo_shape] queries. The type should no longer be specified in the [indexed_shape] section." 
- search: - rest_total_hits_as_int: true - index: shapes - size: 0 - body: - query: - bool: - filter: - geo_shape: - location: - indexed_shape: - index: "shapes" - type: "_doc" - id: "deu" - path: "location" - - - match: {hits.total: 1 } diff --git a/x-pack/plugin/src/yamlRestTestV7Compat/resources/rest-api-spec/test/freeze.gone/10_basic_compat.yml b/x-pack/plugin/src/yamlRestTestV7Compat/resources/rest-api-spec/test/freeze.gone/10_basic_compat.yml deleted file mode 100644 index a2d05446bbe1a..0000000000000 --- a/x-pack/plugin/src/yamlRestTestV7Compat/resources/rest-api-spec/test/freeze.gone/10_basic_compat.yml +++ /dev/null @@ -1,38 +0,0 @@ ---- -setup: - - requires: - test_runner_features: - - "headers" - - "warnings_regex" - - do: - index: - index: some-test-index-1 - id: "1" - body: { foo: bar } - ---- -"Freezing a non-existent index throws 404": - - do: - headers: - Content-Type: "application/vnd.elasticsearch+json;compatible-with=7" - Accept: "application/vnd.elasticsearch+json;compatible-with=7" - indices.freeze: - index: some-test-index-404 - catch: missing - ---- -"Freezing an index throws 410": - - do: - headers: - Content-Type: "application/vnd.elasticsearch+json;compatible-with=7" - Accept: "application/vnd.elasticsearch+json;compatible-with=7" - indices.freeze: - index: some-test-index-1 - catch: /It is no longer possible to freeze indices, but existing frozen indices can still be unfrozen/ - ---- -"Without compat headers throws 400": - - do: - indices.freeze: - index: some-test-index-1 - catch: /no handler found for uri/ diff --git a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle index 13470e3c2e4ec..b429e123bb631 100644 --- a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle +++ b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle @@ -12,7 +12,7 @@ dependencies { testImplementation project(':x-pack:qa') } -Version ccsCompatVersion = new Version(VersionProperties.getElasticsearchVersion().getMajor(), VersionProperties.getElasticsearchVersion().getMinor() - 1, 0) +Version ccsCompatVersion = BuildParams.bwcVersions.minimumWireCompatibleVersion restResources { restApi { diff --git a/x-pack/plugin/watcher/qa/rest/build.gradle b/x-pack/plugin/watcher/qa/rest/build.gradle index 3f61bdcb3c2ed..a911c022212b2 100644 --- a/x-pack/plugin/watcher/qa/rest/build.gradle +++ b/x-pack/plugin/watcher/qa/rest/build.gradle @@ -35,25 +35,3 @@ if (BuildParams.inFipsJvm){ tasks.named("yamlRestTest").configure{enabled = false } } - -tasks.named("yamlRestTestV7CompatTransform").configure{ task -> - task.skipTest("mustache/30_search_input/Test search input mustache integration (using request body and rest_total_hits_as_int)", "remove JodaCompatibleDateTime -- ZonedDateTime doesn't output millis/nanos if they're 0 (#78417)") - task.skipTest("mustache/30_search_input/Test search input mustache integration (using request body)", "remove JodaCompatibleDateTime -- ZonedDateTime doesn't output millis/nanos if they're 0 (#78417)") - task.skipTest("mustache/40_search_transform/Test search transform mustache integration (using request body)", "remove JodaCompatibleDateTime -- ZonedDateTime doesn't output millis/nanos if they're 0 (#78417)") - task.skipTest("painless/40_exception/Test painless exceptions are returned when logging a broken response", "Exceptions are no longer thrown from Mustache, but from the transform action itself") - 
task.replaceKeyInDo("watcher.ack_watch", "xpack-watcher.ack_watch") - task.replaceKeyInDo("watcher.activate_watch", "xpack-watcher.activate_watch") - task.replaceKeyInDo("watcher.deactivate_watch", "xpack-watcher.deactivate_watch") - task.replaceKeyInDo("watcher.delete_watch", "xpack-watcher.delete_watch") - task.replaceKeyInDo("watcher.execute_watch", "xpack-watcher.execute_watch") - task.replaceKeyInDo("watcher.get_watch", "xpack-watcher.get_watch") - task.replaceKeyInDo("watcher.put_watch", "xpack-watcher.put_watch") - task.replaceKeyInDo("watcher.start", "xpack-watcher.start") - task.replaceKeyInDo("watcher.stats", "xpack-watcher.stats") - task.replaceKeyInDo("watcher.stop", "xpack-watcher.stop") - - task.addAllowedWarningRegex(".*_xpack/watcher/.* is deprecated.*") - task.addAllowedWarningRegex("\\[types removal\\].*") - task.replaceValueTextByKeyValue("path", "/my_index/my_type/{{ctx.watch_id}}", "/my_index/_doc/{{ctx.watch_id}}", - "Test webhook action with mustache integration") -} diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.ack_watch.json b/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.ack_watch.json deleted file mode 100644 index 66ee9a7d3b83f..0000000000000 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.ack_watch.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "xpack-watcher.ack_watch":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-ack-watch.html", - "description":"Acknowledges a watch, manually throttling the execution of the watch's actions." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/watcher/watch/{watch_id}/_ack", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "watch_id":{ - "type":"string", - "description":"Watch ID" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - }, - { - "path":"/_xpack/watcher/watch/{watch_id}/_ack/{action_id}", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "watch_id":{ - "type":"string", - "description":"Watch ID" - }, - "action_id":{ - "type":"list", - "description":"A comma-separated list of the action ids to be acked" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - } - } -} diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.activate_watch.json b/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.activate_watch.json deleted file mode 100644 index 2790b3ac69040..0000000000000 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.activate_watch.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "xpack-watcher.activate_watch":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-activate-watch.html", - "description":"Activates a currently inactive watch." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/watcher/watch/{watch_id}/_activate", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "watch_id":{ - "type":"string", - "description":"Watch ID" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - } - } -} diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.deactivate_watch.json b/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.deactivate_watch.json deleted file mode 100644 index 7633c5ab9ffcc..0000000000000 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.deactivate_watch.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "xpack-watcher.deactivate_watch":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-deactivate-watch.html", - "description":"Deactivates a currently active watch." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/watcher/watch/{watch_id}/_deactivate", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "watch_id":{ - "type":"string", - "description":"Watch ID" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - } - } -} diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.delete_watch.json b/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.delete_watch.json deleted file mode 100644 index b42003207eda4..0000000000000 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.delete_watch.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "xpack-watcher.delete_watch":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-delete-watch.html", - "description":"Removes a watch from Watcher." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/watcher/watch/{id}", - "methods":[ - "DELETE" - ], - "parts":{ - "id":{ - "type":"string", - "description":"Watch ID" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - } - } -} diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.execute_watch.json b/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.execute_watch.json deleted file mode 100644 index 4dba5bd1817a5..0000000000000 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.execute_watch.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "xpack-watcher.execute_watch":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-execute-watch.html", - "description":"Forces the execution of a stored watch." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/watcher/watch/{id}/_execute", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "id":{ - "type":"string", - "description":"Watch ID" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - }, - { - "path":"/_xpack/watcher/watch/_execute", - "methods":[ - "PUT", - "POST" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "debug":{ - "type":"boolean", - "description":"indicates whether the watch should execute in debug mode", - "required":false - } - }, - "body":{ - "description":"Execution control", - "required":false - } - } -} diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.get_watch.json b/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.get_watch.json deleted file mode 100644 index 0fa158751fba4..0000000000000 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.get_watch.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "xpack-watcher.get_watch":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-get-watch.html", - "description":"Retrieves a watch by its ID." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/watcher/watch/{id}", - "methods":[ - "GET" - ], - "parts":{ - "id":{ - "type":"string", - "description":"Watch ID" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{} - } -} diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.put_watch.json b/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.put_watch.json deleted file mode 100644 index b6d17b0d92739..0000000000000 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.put_watch.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "xpack-watcher.put_watch":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-put-watch.html", - "description":"Creates a new watch, or updates an existing one." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/watcher/watch/{id}", - "methods":[ - "PUT", - "POST" - ], - "parts":{ - "id":{ - "type":"string", - "description":"Watch ID" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "active":{ - "type":"boolean", - "description":"Specify whether the watch is in/active by default" - }, - "version":{ - "type":"number", - "description":"Explicit version number for concurrency control" - }, - "if_seq_no":{ - "type":"number", - "description":"only update the watch if the last operation that has changed the watch has the specified sequence number" - }, - "if_primary_term":{ - "type":"number", - "description":"only update the watch if the last operation that has changed the watch has the specified primary term" - } - }, - "body":{ - "description":"The watch", - "required":false - } - } -} diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.start.json b/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.start.json deleted file mode 100644 index 051344d02e878..0000000000000 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.start.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "xpack-watcher.start":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-start.html", - "description":"Starts Watcher if it is not already running." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/watcher/_start", - "methods":[ - "POST" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{} - } -} diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.stats.json b/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.stats.json deleted file mode 100644 index f9dfd25f1e972..0000000000000 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.stats.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "xpack-watcher.stats":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-stats.html", - "description":"Retrieves the current Watcher metrics." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/watcher/stats", - "methods":[ - "GET" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - }, - { - "path":"/_xpack/watcher/stats/{metric}", - "methods":[ - "GET" - ], - "parts":{ - "metric":{ - "type":"list", - "options":[ - "_all", - "queued_watches", - "current_watches", - "pending_watches" - ], - "description":"Controls what additional stat metrics should be include in the response" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "metric":{ - "type":"list", - "options":[ - "_all", - "queued_watches", - "current_watches", - "pending_watches" - ], - "description":"Controls what additional stat metrics should be include in the response" - }, - "emit_stacktraces":{ - "type":"boolean", - "description":"Emits stack traces of currently running watches", - "required":false - } - } - } -} diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.stop.json b/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.stop.json deleted file mode 100644 index c31bb162f6015..0000000000000 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-watcher.stop.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "xpack-watcher.stop":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-stop.html", - "description":"Stops Watcher if it is running." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/watcher/_stop", - "methods":[ - "POST" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{} - } -} diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java index 578fc90bd3e5b..7abce10a82f3c 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java @@ -50,7 +50,6 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -106,7 +105,6 @@ static SearchExecutionContext createMockSearchExecutionContext(boolean allowExpe static final int MAX_FIELD_LENGTH = 30; static WildcardFieldMapper wildcardFieldType; - static WildcardFieldMapper wildcardFieldType79; static KeywordFieldMapper keywordFieldType; private DirectoryReader rewriteReader; private BaseDirectoryWrapper rewriteDir; @@ -128,9 +126,6 @@ public void setUp() throws Exception { builder.ignoreAbove(MAX_FIELD_LENGTH); wildcardFieldType = builder.build(MapperBuilderContext.root(false, false)); - Builder builder79 = new 
WildcardFieldMapper.Builder(WILDCARD_FIELD_NAME, IndexVersions.V_7_9_0); - wildcardFieldType79 = builder79.build(MapperBuilderContext.root(false, false)); - org.elasticsearch.index.mapper.KeywordFieldMapper.Builder kwBuilder = new KeywordFieldMapper.Builder( KEYWORD_FIELD_NAME, IndexVersion.current() @@ -211,37 +206,6 @@ public void testIgnoreAbove() throws IOException { assertTrue(fields.stream().anyMatch(field -> "field".equals(field.stringValue()))); } - public void testBWCIndexVersion() throws IOException { - // Create old format index using wildcard ngram analyzer used in 7.9 launch - Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(WildcardFieldMapper.WILDCARD_ANALYZER_7_9); - iwc.setMergePolicy(newTieredMergePolicy(random())); - RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); - - Document doc = new Document(); - LuceneDocument parseDoc = new LuceneDocument(); - addFields(parseDoc, doc, "a b"); - indexDoc(parseDoc, doc, iw); - - iw.forceMerge(1); - DirectoryReader reader = iw.getReader(); - IndexSearcher searcher = newSearcher(reader); - iw.close(); - - // Unnatural circumstance - testing we fail if we were to use the new analyzer on old index - Query oldWildcardFieldQuery = wildcardFieldType.fieldType().wildcardQuery("a b", null, null); - TopDocs oldWildcardFieldTopDocs = searcher.search(oldWildcardFieldQuery, 10, Sort.INDEXORDER); - assertThat(oldWildcardFieldTopDocs.totalHits.value, equalTo(0L)); - - // Natural circumstance test we revert to the old analyzer for old indices - Query wildcardFieldQuery = wildcardFieldType79.fieldType().wildcardQuery("a b", null, null); - TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(1L)); - - reader.close(); - dir.close(); - } - // Test long query strings don't cause exceptions public void testTooBigQueryField() throws IOException { Directory dir = newDirectory(); diff --git a/x-pack/qa/repository-old-versions/build.gradle b/x-pack/qa/repository-old-versions/build.gradle index c8659aa0753f4..1abf6662a1b8b 100644 --- a/x-pack/qa/repository-old-versions/build.gradle +++ b/x-pack/qa/repository-old-versions/build.gradle @@ -66,7 +66,7 @@ if (OS.current() == OS.WINDOWS) { }); int currentMajorVersion = org.elasticsearch.gradle.VersionProperties.elasticsearchVersion.major - assert (currentMajorVersion - 2) == 6 : "add archive BWC tests for major version " + (currentMajorVersion - 2) + assert (currentMajorVersion - 2) == 7 : "add archive BWC tests for major version " + (currentMajorVersion - 2) for (String versionString : ['5.0.0', '5.6.16', '6.0.0', '6.8.20']) { Version version = Version.fromString(versionString) String packageName = 'org.elasticsearch.distribution.zip' diff --git a/x-pack/qa/xpack-prefix-rest-compat/build.gradle b/x-pack/qa/xpack-prefix-rest-compat/build.gradle deleted file mode 100644 index 8b91aae21ff73..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/build.gradle +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - - -import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.VersionProperties - -apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' - -/** - * This project exists to test the _xpack prefix for REST compatibility. The _xpack prefix was removed from the specification, but still supported - * in 7x. This project re-introduces the _xpack prefix in the specification but only for compatibility testing purposes. - */ - -configurations { - compatXpackTests -} - -int compatVersion = VersionProperties.getElasticsearchVersion().getMajor() - 1; - -dependencies { - "yamlRestTestV${compatVersion}CompatImplementation" project(':test:framework') - "yamlRestTestV${compatVersion}CompatImplementation"(testArtifact(project(':x-pack:plugin'))) - compatXpackTests project(path: ':x-pack:plugin', configuration: 'restCompatTests') -} - -// copy the already transformed compatible rest tests from the x-pack compatible tests -tasks.named("copyRestCompatTestTask").configure { task -> - task.dependsOn(configurations.compatXpackTests); - task.setXpackConfig(configurations.compatXpackTests); - task.getIncludeXpack().set(List.of("license", "migration", "ml", "rollup", "sql", "ssl")); - def fileOperations = task.getFileOperations() - task.getOutputResourceDir().set(project.getLayout().getBuildDirectory().dir("restResources/v${compatVersion}/yamlTests/original")) - task.setXpackConfigToFileTree( - config -> fileOperations.fileTree( - config.getSingleFile() - ) - ) - } - -// location for keys and certificates -File extraResourceDir = file("$buildDir/extra_resource") -File nodeKey = file("$extraResourceDir/testnode.pem") -File nodeCert = file("$extraResourceDir/testnode.crt") -// location for service tokens -File serviceTokens = file("$extraResourceDir/service_tokens") - -// Add key and certs to test classpath: it expects them there -// User cert and key PEM files instead of a JKS Keystore for the cluster's trust material so that -// it can run in a FIPS 140 JVM -// TODO: Remove all existing uses of cross project file references when the new approach for referencing static files is available -// https://github.com/elastic/elasticsearch/pull/32201 -def copyExtraResources = tasks.register("copyExtraResources", Copy) { - from(project(':x-pack:plugin:core').file('src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/')) { - include 'testnode.crt', 'testnode.pem' - } - from(project(':x-pack:plugin:security:qa:service-account').file('src/javaRestTest/resources/')) { - include 'service_tokens' - } - into extraResourceDir -} -// Add keystores to test classpath: it expects it there -sourceSets."yamlRestTestV${compatVersion}Compat".resources.compiledBy(copyExtraResources, t -> { - def dirProp = project.objects.directoryProperty() - dirProp.set(t.destinationDir) - return dirProp; -}) - -tasks.named("processYamlRestTestV${compatVersion}CompatResources").configure { - dependsOn("copyExtraResources") -} - -testClusters.configureEach { - testDistribution = 'DEFAULT' // this is important since we use the reindex module in ML - setting 'xpack.ml.enabled', 'true' - setting 'xpack.security.enabled', 'true' - setting 'xpack.watcher.enabled', 'false' - setting 'xpack.security.authc.token.enabled', 'true' - setting 'xpack.security.authc.api_key.enabled', 'true' - setting 'xpack.security.transport.ssl.enabled', 'true' - setting 'xpack.security.transport.ssl.key', nodeKey.name - setting 'xpack.security.transport.ssl.certificate', nodeCert.name - setting 
'xpack.security.transport.ssl.verification_mode', 'certificate' - setting 'xpack.security.audit.enabled', 'true' - setting 'xpack.license.self_generated.type', 'trial' - // disable ILM history, since it disturbs tests using _all - setting 'indices.lifecycle.history_index_enabled', 'false' - keystore 'bootstrap.password', 'x-pack-test-password' - keystore 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' - setting 'xpack.searchable.snapshot.shared_cache.size', '16MB' - setting 'xpack.searchable.snapshot.shared_cache.region_size', '256KB' - - user username: "x_pack_rest_user", password: "x-pack-test-password" - extraConfigFile nodeKey.name, nodeKey - extraConfigFile nodeCert.name, nodeCert - extraConfigFile serviceTokens.name, serviceTokens - - requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") -} - -// transform (again) the (already) transformed x-pack compatibility tests to test the xpack prefixes -tasks.named("yamlRestTestV7CompatTransform").configure{ task -> - - task.replaceKeyInDo("license.delete", "xpack-license.delete") - task.replaceKeyInDo("license.get", "xpack-license.get") - task.replaceKeyInDo("license.get_basic_status", "xpack-license.get_basic_status") - task.replaceKeyInDo("license.get_trial_status", "xpack-license.get_trial_status") - task.replaceKeyInDo("license.post", "xpack-license.post") - task.replaceKeyInDo("license.post_start_basic", "xpack-license.post_start_basic") - task.replaceKeyInDo("license.post_start_trial", "xpack-license.post_start_trial") - task.addAllowedWarningRegex(".*_xpack/license.* is deprecated.*") - - task.replaceKeyInDo("migration.deprecations", "xpack-migration.deprecations") - task.addAllowedWarningRegex(".*_xpack/migration.* is deprecated.*") - - task.replaceKeyInDo("ml.close_job", "xpack-ml.close_job") - task.replaceKeyInDo("ml.delete_calendar", "xpack-ml.delete_calendar") - task.replaceKeyInDo("ml.delete_calendar_event", "xpack-ml.delete_calendar_event") - task.replaceKeyInDo("ml.delete_calendar_job", "xpack-ml.delete_calendar_job") - task.replaceKeyInDo("ml.delete_datafeed", "xpack-ml.delete_datafeed") - task.replaceKeyInDo("ml.delete_expired_data", "xpack-ml.delete_expired_data") - task.replaceKeyInDo("ml.delete_filter", "xpack-ml.delete_filter") - task.replaceKeyInDo("ml.delete_forecast", "xpack-ml.delete_forecast") - task.replaceKeyInDo("ml.delete_job", "xpack-ml.delete_job") - task.replaceKeyInDo("ml.delete_model_snapshot", "xpack-ml.delete_model_snapshot") - task.replaceKeyInDo("ml.flush_job", "xpack-ml.flush_job") - task.replaceKeyInDo("ml.forecast", "xpack-ml.forecast") - task.replaceKeyInDo("ml.get_buckets", "xpack-ml.get_buckets") - task.replaceKeyInDo("ml.get_calendar_events", "xpack-ml.get_calendar_events") - task.replaceKeyInDo("ml.get_calendars", "xpack-ml.get_calendars") - task.replaceKeyInDo("ml.get_categories", "xpack-ml.get_categories") - task.replaceKeyInDo("ml.get_datafeed_stats", "xpack-ml.get_datafeed_stats") - task.replaceKeyInDo("ml.get_datafeeds", "xpack-ml.get_datafeeds") - task.replaceKeyInDo("ml.get_filters", "xpack-ml.get_filters") - task.replaceKeyInDo("ml.get_influencers", "xpack-ml.get_influencers") - task.replaceKeyInDo("ml.get_job_stats", "xpack-ml.get_job_stats") - task.replaceKeyInDo("ml.get_jobs", "xpack-ml.get_jobs") - task.replaceKeyInDo("ml.get_model_snapshots", "xpack-ml.get_model_snapshots") - task.replaceKeyInDo("ml.get_overall_buckets", "xpack-ml.get_overall_buckets") - task.replaceKeyInDo("ml.get_records", "xpack-ml.get_records") - 
task.replaceKeyInDo("ml.info", "xpack-ml.info") - task.replaceKeyInDo("ml.open_job", "xpack-ml.open_job") - task.replaceKeyInDo("ml.post_calendar_events", "xpack-ml.post_calendar_events") - task.replaceKeyInDo("ml.post_data", "xpack-ml.post_data") - task.replaceKeyInDo("ml.preview_datafeed", "xpack-ml.preview_datafeed") - task.replaceKeyInDo("ml.put_calendar", "xpack-ml.put_calendar") - task.replaceKeyInDo("ml.put_calendar_job", "xpack-ml.put_calendar_job") - task.replaceKeyInDo("ml.put_datafeed", "xpack-ml.put_datafeed") - task.replaceKeyInDo("ml.put_filter", "xpack-ml.put_filter") - task.replaceKeyInDo("ml.put_job", "xpack-ml.put_job") - task.replaceKeyInDo("ml.revert_model_snapshot", "xpack-ml.revert_model_snapshot") - task.replaceKeyInDo("ml.set_upgrade_mode", "xpack-ml.set_upgrade_mode") - task.replaceKeyInDo("ml.start_datafeed", "xpack-ml.start_datafeed") - task.replaceKeyInDo("ml.stop_datafeed", "xpack-ml.stop_datafeed") - task.replaceKeyInDo("ml.update_datafeed", "xpack-ml.update_datafeed") - task.replaceKeyInDo("ml.update_filter", "xpack-ml.update_filter") - task.replaceKeyInDo("ml.update_job", "xpack-ml.update_job") - task.replaceKeyInDo("ml.update_model_snapshot", "xpack-ml.update_model_snapshot") - task.replaceKeyInDo("ml.validate", "xpack-ml.validate") - task.replaceKeyInDo("ml.validate_detector", "xpack-ml.validate_detector") - task.addAllowedWarningRegex(".*_xpack/ml.* is deprecated.*") - task.addAllowedWarningRegex("bucket_span .* is not an integral .* of the number of sconds in 1d.* This is now deprecated.*") - - task.replaceKeyInDo("rollup.delete_job", "xpack-rollup.delete_job") - task.replaceKeyInDo("rollup.get_jobs", "xpack-rollup.get_jobs") - task.replaceKeyInDo("rollup.get_rollup_caps", "xpack-rollup.get_rollup_caps") - task.replaceKeyInDo("rollup.get_rollup_index_caps", "xpack-rollup.get_rollup_index_caps") - task.replaceKeyInDo("rollup.put_job", "xpack-rollup.put_job") - task.replaceKeyInDo("rollup.start_job", "xpack-rollup.start_job") - task.replaceKeyInDo("rollup.stop_job", "xpack-rollup.stop_job") - task.addAllowedWarningRegex(".*_xpack/rollup.* is deprecated.*") - - task.replaceKeyInDo("sql.clear_cursor", "xpack-sql.clear_cursor") - task.replaceKeyInDo("sql.query", "xpack-sql.query") - task.replaceKeyInDo("sql.translate", "xpack-sql.translate") - task.addAllowedWarningRegex(".*_xpack/sql.* is deprecated.*") - - task.replaceKeyInDo("ssl.certificates", "xpack-ssl.certificates", "Test get SSL certificates") - task.addAllowedWarningRegexForTest(".*_xpack/ssl.* is deprecated.*", "Test get SSL certificates") -} - -tasks.named("yamlRestTestV7CompatTest").configure { - systemProperty 'tests.rest.blacklist', [ - 'ml/evaluate_data_frame/Test classification auc_roc', - 'ml/evaluate_data_frame/Test classification auc_roc with default top_classes_field', - 'ml/evaluate_data_frame/Test outlier_detection auc_roc', - 'ml/evaluate_data_frame/Test outlier_detection auc_roc include curve', - 'ml/evaluate_data_frame/Test outlier_detection auc_roc given actual_field is int', - ].join(',') -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java deleted file mode 100644 index 3e060d2875e8a..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.test.rest; - -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; - -public class XPackRestIT extends AbstractXPackRestTest { - - public XPackRestIT(ClientYamlTestCandidate testCandidate) { - super(testCandidate); - } - - @ParametersFactory - public static Iterable parameters() throws Exception { - return createParameters(); - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.delete.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.delete.json deleted file mode 100644 index 3a20798a81482..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.delete.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "xpack-license.delete":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-license.html", - "description":"Deletes licensing information for the cluster" - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/license", - "methods":[ - "DELETE" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.get.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.get.json deleted file mode 100644 index e5c89084c3759..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.get.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "xpack-license.get":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/get-license.html", - "description":"Retrieves licensing information for the cluster" - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/license", - "methods":[ - "GET" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "local":{ - "type":"boolean", - "description":"Return local information, do not retrieve the state from master node (default: false)" - }, - "accept_enterprise":{ - "type":"boolean", - "description":"Supported for backwards compatibility with 7.x. 
If this param is used it must be set to true", - "deprecated":true - } - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.get_basic_status.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.get_basic_status.json deleted file mode 100644 index f0808e45d2da8..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.get_basic_status.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "xpack-license.get_basic_status":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/get-basic-status.html", - "description":"Retrieves information about the status of the basic license." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/license/basic_status", - "methods":[ - "GET" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{} - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.get_trial_status.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.get_trial_status.json deleted file mode 100644 index 8ccde8365830f..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.get_trial_status.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "xpack-license.get_trial_status":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/get-trial-status.html", - "description":"Retrieves information about the status of the trial license." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/license/trial_status", - "methods":[ - "GET" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{} - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.post.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.post.json deleted file mode 100644 index 8db5533bbad9e..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.post.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "xpack-license.post":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/update-license.html", - "description":"Updates the license for the cluster." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/license", - "methods":[ - "PUT", - "POST" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "acknowledge":{ - "type":"boolean", - "description":"whether the user has acknowledged acknowledge messages (default: false)" - }, - "master_timeout": { - "type": "time", - "description": "Timeout for processing on master node" - }, - "timeout": { - "type": "time", - "description": "Timeout for acknowledgement of update from all nodes in cluster" - } - }, - "body":{ - "description":"licenses to be installed" - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.post_start_basic.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.post_start_basic.json deleted file mode 100644 index 4589ed26386ce..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.post_start_basic.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "xpack-license.post_start_basic":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/start-basic.html", - "description":"Starts an indefinite basic license." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/license/start_basic", - "methods":[ - "POST" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "acknowledge":{ - "type":"boolean", - "description":"whether the user has acknowledged acknowledge messages (default: false)" - } - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.post_start_trial.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.post_start_trial.json deleted file mode 100644 index 225034d397ef7..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-license.post_start_trial.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "xpack-license.post_start_trial":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/start-trial.html", - "description":"starts a limited time trial license." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/license/start_trial", - "methods":[ - "POST" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "type":{ - "type":"string", - "description":"The type of trial license to generate (default: \"trial\")" - }, - "acknowledge":{ - "type":"boolean", - "description":"whether the user has acknowledged acknowledge messages (default: false)" - } - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-migration.deprecations.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-migration.deprecations.json deleted file mode 100644 index 05885f8458770..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-migration.deprecations.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "xpack-migration.deprecations":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/migration-api-deprecation.html", - "description":"Retrieves information about different cluster, node, and index level settings that use deprecated features that will be removed or changed in the next major version." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/migration/deprecations", - "methods":[ - "GET" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - }, - { - "path":"/{index}/_xpack/migration/deprecations", - "methods":[ - "GET" - ], - "parts":{ - "index":{ - "type":"string", - "description":"Index pattern" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{} - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.close_job.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.close_job.json deleted file mode 100644 index afbf2591e10f9..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.close_job.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "xpack-ml.close_job":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-close-job.html", - "description":"Closes one or more anomaly detection jobs. A job can be opened and closed multiple times throughout its lifecycle." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/_close", - "methods":[ - "POST" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The name of the job to close" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "allow_no_match":{ - "type":"boolean", - "required":false, - "description":"Whether to ignore if a wildcard expression matches no jobs. 
(This includes `_all` string or when no jobs have been specified)" - }, - "allow_no_jobs":{ - "type":"boolean", - "required":false, - "description":"Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified)", - "deprecated":true - }, - "force":{ - "type":"boolean", - "required":false, - "description":"True if the job should be forcefully closed" - }, - "timeout":{ - "type":"time", - "description":"Controls the time to wait until a job has closed. Default to 30 minutes" - } - }, - "body":{ - "description":"The URL params optionally sent in the body", - "required":false - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_calendar.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_calendar.json deleted file mode 100644 index 7965921a5ea21..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_calendar.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "xpack-ml.delete_calendar":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-calendar.html", - "description":"Deletes a calendar." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/calendars/{calendar_id}", - "methods":[ - "DELETE" - ], - "parts":{ - "calendar_id":{ - "type":"string", - "description":"The ID of the calendar to delete" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_calendar_event.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_calendar_event.json deleted file mode 100644 index 7584b2512a8b3..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_calendar_event.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "xpack-ml.delete_calendar_event":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-calendar-event.html", - "description":"Deletes scheduled events from a calendar." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/calendars/{calendar_id}/events/{event_id}", - "methods":[ - "DELETE" - ], - "parts":{ - "calendar_id":{ - "type":"string", - "description":"The ID of the calendar to modify" - }, - "event_id":{ - "type":"string", - "description":"The ID of the event to remove from the calendar" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_calendar_job.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_calendar_job.json deleted file mode 100644 index bdada60699df0..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_calendar_job.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "xpack-ml.delete_calendar_job":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-calendar-job.html", - "description":"Deletes anomaly detection jobs from a calendar." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/calendars/{calendar_id}/jobs/{job_id}", - "methods":[ - "DELETE" - ], - "parts":{ - "calendar_id":{ - "type":"string", - "description":"The ID of the calendar to modify" - }, - "job_id":{ - "type":"string", - "description":"The ID of the job to remove from the calendar" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_datafeed.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_datafeed.json deleted file mode 100644 index 6743b3eadff91..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_datafeed.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "xpack-ml.delete_datafeed":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-datafeed.html", - "description":"Deletes an existing datafeed." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/datafeeds/{datafeed_id}", - "methods":[ - "DELETE" - ], - "parts":{ - "datafeed_id":{ - "type":"string", - "description":"The ID of the datafeed to delete" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "force":{ - "type":"boolean", - "required":false, - "description":"True if the datafeed should be forcefully deleted" - } - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_expired_data.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_expired_data.json deleted file mode 100644 index 0520055d968d1..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_expired_data.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "xpack-ml.delete_expired_data":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-expired-data.html", - "description":"Deletes expired and unused machine learning data." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/_delete_expired_data/{job_id}", - "methods":[ - "DELETE" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The ID of the job(s) to perform expired data hygiene for" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - }, - { - "path":"/_xpack/ml/_delete_expired_data", - "methods":[ - "DELETE" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "requests_per_second":{ - "type":"number", - "required":false, - "description":"The desired requests per second for the deletion processes." - }, - "timeout":{ - "type":"time", - "required":false, - "description":"How long can the underlying delete processes run until they are canceled" - } - }, - "body":{ - "description":"deleting expired data parameters" - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_filter.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_filter.json deleted file mode 100644 index c9c4f76e15f53..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_filter.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "xpack-ml.delete_filter":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-filter.html", - "description":"Deletes a filter." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/filters/{filter_id}", - "methods":[ - "DELETE" - ], - "parts":{ - "filter_id":{ - "type":"string", - "description":"The ID of the filter to delete" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_forecast.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_forecast.json deleted file mode 100644 index 4c647046c3704..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_forecast.json +++ /dev/null @@ -1,65 +0,0 @@ -{ - "xpack-ml.delete_forecast":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-forecast.html", - "description":"Deletes forecasts from a machine learning job." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/_forecast", - "methods":[ - "DELETE" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The ID of the job from which to delete forecasts" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - }, - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/_forecast/{forecast_id}", - "methods":[ - "DELETE" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The ID of the job from which to delete forecasts" - }, - "forecast_id":{ - "type":"string", - "description":"The ID of the forecast to delete, can be comma delimited list. Leaving blank implies `_all`" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "allow_no_forecasts":{ - "type":"boolean", - "required":false, - "description":"Whether to ignore if `_all` matches no forecasts" - }, - "timeout":{ - "type":"time", - "required":false, - "description":"Controls the time to wait until the forecast(s) are deleted. Default to 30 seconds" - } - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_job.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_job.json deleted file mode 100644 index 9043baacb781e..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_job.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "xpack-ml.delete_job":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-job.html", - "description":"Deletes an existing anomaly detection job." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}", - "methods":[ - "DELETE" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The ID of the job to delete" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "force":{ - "type":"boolean", - "description":"True if the job should be forcefully deleted", - "default":false - }, - "wait_for_completion":{ - "type":"boolean", - "description":"Should this request wait until the operation has completed before returning", - "default":true - } - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_model_snapshot.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_model_snapshot.json deleted file mode 100644 index e3f1dd2036602..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.delete_model_snapshot.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "xpack-ml.delete_model_snapshot":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-snapshot.html", - "description":"Deletes an existing model snapshot." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/model_snapshots/{snapshot_id}", - "methods":[ - "DELETE" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The ID of the job to fetch" - }, - "snapshot_id":{ - "type":"string", - "description":"The ID of the snapshot to delete" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.flush_job.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.flush_job.json deleted file mode 100644 index 801258194d28b..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.flush_job.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "xpack-ml.flush_job":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-flush-job.html", - "description":"Forces any buffered data to be processed by the job." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/_flush", - "methods":[ - "POST" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The name of the job to flush" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "calc_interim":{ - "type":"boolean", - "description":"Calculates interim results for the most recent bucket or all buckets within the latency period" - }, - "start":{ - "type":"string", - "description":"When used in conjunction with calc_interim, specifies the range of buckets on which to calculate interim results" - }, - "end":{ - "type":"string", - "description":"When used in conjunction with calc_interim, specifies the range of buckets on which to calculate interim results" - }, - "advance_time":{ - "type":"string", - "description":"Advances time to the given value generating results and updating the model for the advanced interval" - }, - "skip_time":{ - "type":"string", - "description":"Skips time to the given value without generating results or updating the model for the skipped interval" - } - }, - "body":{ - "description":"Flush parameters" - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.forecast.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.forecast.json deleted file mode 100644 index f86d1d6f6d943..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.forecast.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "xpack-ml.forecast":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-forecast.html", - "description":"Predicts the future behavior of a time series by using its historical behavior." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/_forecast", - "methods":[ - "POST" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The ID of the job to forecast for" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "duration":{ - "type":"time", - "required":false, - "description":"The duration of the forecast" - }, - "expires_in":{ - "type":"time", - "required":false, - "description":"The time interval after which the forecast expires. Expired forecasts will be deleted at the first opportunity." - }, - "max_model_memory":{ - "type":"string", - "required":false, - "description":"The max memory able to be used by the forecast. Default is 20mb." 
- } - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_buckets.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_buckets.json deleted file mode 100644 index 0ccc693b5b682..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_buckets.json +++ /dev/null @@ -1,97 +0,0 @@ -{ - "xpack-ml.get_buckets":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-bucket.html", - "description":"Retrieves anomaly detection job results for one or more buckets." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/results/buckets/{timestamp}", - "methods":[ - "GET", - "POST" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"ID of the job to get bucket results from" - }, - "timestamp":{ - "type":"string", - "description":"The timestamp of the desired single bucket result" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - }, - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/results/buckets", - "methods":[ - "GET", - "POST" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"ID of the job to get bucket results from" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "expand":{ - "type":"boolean", - "description":"Include anomaly records" - }, - "exclude_interim":{ - "type":"boolean", - "description":"Exclude interim results" - }, - "from":{ - "type":"int", - "description":"skips a number of buckets" - }, - "size":{ - "type":"int", - "description":"specifies a max number of buckets to get" - }, - "start":{ - "type":"string", - "description":"Start time filter for buckets" - }, - "end":{ - "type":"string", - "description":"End time filter for buckets" - }, - "anomaly_score":{ - "type":"double", - "description":"Filter for the most anomalous buckets" - }, - "sort":{ - "type":"string", - "description":"Sort buckets by a particular field" - }, - "desc":{ - "type":"boolean", - "description":"Set the sort direction" - } - }, - "body":{ - "description":"Bucket selection details if not provided in URI" - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_calendar_events.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_calendar_events.json deleted file mode 100644 index 731c510767488..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_calendar_events.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "xpack-ml.get_calendar_events":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-calendar-event.html", - "description":"Retrieves information about the scheduled events in calendars." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/calendars/{calendar_id}/events", - "methods":[ - "GET" - ], - "parts":{ - "calendar_id":{ - "type":"string", - "description":"The ID of the calendar containing the events" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "job_id":{ - "type":"string", - "description":"Get events for the job. When this option is used calendar_id must be '_all'" - }, - "start":{ - "type":"string", - "description":"Get events after this time" - }, - "end":{ - "type":"date", - "description":"Get events before this time" - }, - "from":{ - "type":"int", - "description":"Skips a number of events" - }, - "size":{ - "type":"int", - "description":"Specifies a max number of events to get" - } - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_calendars.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_calendars.json deleted file mode 100644 index c80005451ff0b..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_calendars.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "xpack-ml.get_calendars":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-calendar.html", - "description":"Retrieves configuration information for calendars." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/calendars", - "methods":[ - "GET", - "POST" - ] - }, - { - "path":"/_xpack/ml/calendars/{calendar_id}", - "methods":[ - "GET", - "POST" - ], - "parts":{ - "calendar_id":{ - "type":"string", - "description":"The ID of the calendar to fetch" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "from":{ - "type":"int", - "description":"skips a number of calendars" - }, - "size":{ - "type":"int", - "description":"specifies a max number of calendars to get" - } - }, - "body":{ - "description":"The from and size parameters optionally sent in the body" - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_categories.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_categories.json deleted file mode 100644 index 4fce55f682248..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_categories.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "xpack-ml.get_categories":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-category.html", - "description":"Retrieves anomaly detection job results for one or more categories." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/results/categories/{category_id}", - "methods":[ - "GET", - "POST" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The name of the job" - }, - "category_id":{ - "type":"long", - "description":"The identifier of the category definition of interest" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - }, - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/results/categories", - "methods":[ - "GET", - "POST" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The name of the job" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "from":{ - "type":"int", - "description":"skips a number of categories" - }, - "size":{ - "type":"int", - "description":"specifies a max number of categories to get" - }, - "partition_field_value":{ - "type":"string", - "description":"Specifies the partition to retrieve categories for. This is optional, and should never be used for jobs where per-partition categorization is disabled." - } - }, - "body":{ - "description":"Category selection details if not provided in URI" - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_datafeed_stats.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_datafeed_stats.json deleted file mode 100644 index 1c63df2a33fef..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_datafeed_stats.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "xpack-ml.get_datafeed_stats":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed-stats.html", - "description":"Retrieves usage information for datafeeds." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/datafeeds/{datafeed_id}/_stats", - "methods":[ - "GET" - ], - "parts":{ - "datafeed_id":{ - "type":"string", - "description":"The ID of the datafeeds stats to fetch" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - }, - { - "path":"/_xpack/ml/datafeeds/_stats", - "methods":[ - "GET" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "allow_no_match":{ - "type":"boolean", - "required":false, - "description":"Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified)" - }, - "allow_no_datafeeds":{ - "type":"boolean", - "required":false, - "description":"Whether to ignore if a wildcard expression matches no datafeeds. 
(This includes `_all` string or when no datafeeds have been specified)", - "deprecated":true - } - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_datafeeds.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_datafeeds.json deleted file mode 100644 index 14f8a3f65397d..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_datafeeds.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "xpack-ml.get_datafeeds":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed.html", - "description":"Retrieves configuration information for datafeeds." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/datafeeds/{datafeed_id}", - "methods":[ - "GET" - ], - "parts":{ - "datafeed_id":{ - "type":"string", - "description":"The ID of the datafeeds to fetch" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - }, - { - "path":"/_xpack/ml/datafeeds", - "methods":[ - "GET" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "allow_no_match":{ - "type":"boolean", - "required":false, - "description":"Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified)" - }, - "allow_no_datafeeds":{ - "type":"boolean", - "required":false, - "description":"Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified)", - "deprecated":true - }, - "exclude_generated": { - "required": false, - "type": "boolean", - "default": false, - "description": "Omits fields that are illegal to set on datafeed PUT" - } - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_filters.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_filters.json deleted file mode 100644 index 886f289a75096..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_filters.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "xpack-ml.get_filters":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-filter.html", - "description":"Retrieves filters." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/filters", - "methods":[ - "GET" - ] - }, - { - "path":"/_xpack/ml/filters/{filter_id}", - "methods":[ - "GET" - ], - "parts":{ - "filter_id":{ - "type":"string", - "description":"The ID of the filter to fetch" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "from":{ - "type":"int", - "description":"skips a number of filters" - }, - "size":{ - "type":"int", - "description":"specifies a max number of filters to get" - } - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_influencers.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_influencers.json deleted file mode 100644 index d039f14175f09..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_influencers.json +++ /dev/null @@ -1,72 +0,0 @@ -{ - "xpack-ml.get_influencers":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-influencer.html", - "description":"Retrieves anomaly detection job results for one or more influencers." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/results/influencers", - "methods":[ - "GET", - "POST" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"Identifier for the anomaly detection job" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "exclude_interim":{ - "type":"boolean", - "description":"Exclude interim results" - }, - "from":{ - "type":"int", - "description":"skips a number of influencers" - }, - "size":{ - "type":"int", - "description":"specifies a max number of influencers to get" - }, - "start":{ - "type":"string", - "description":"start timestamp for the requested influencers" - }, - "end":{ - "type":"string", - "description":"end timestamp for the requested influencers" - }, - "influencer_score":{ - "type":"double", - "description":"influencer score threshold for the requested influencers" - }, - "sort":{ - "type":"string", - "description":"sort field for the requested influencers" - }, - "desc":{ - "type":"boolean", - "description":"whether the results should be sorted in decending order" - } - }, - "body":{ - "description":"Influencer selection criteria" - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_job_stats.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_job_stats.json deleted file mode 100644 index ea6154916c155..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_job_stats.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "xpack-ml.get_job_stats":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job-stats.html", - "description":"Retrieves usage information for anomaly detection jobs." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/_stats", - "methods":[ - "GET" - ] - }, - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/_stats", - "methods":[ - "GET" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The ID of the jobs stats to fetch" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "allow_no_match":{ - "type":"boolean", - "required":false, - "description":"Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified)" - }, - "allow_no_jobs":{ - "type":"boolean", - "required":false, - "description":"Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified)", - "deprecated":true - } - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_jobs.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_jobs.json deleted file mode 100644 index 70fb374b846fe..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_jobs.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "xpack-ml.get_jobs":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job.html", - "description":"Retrieves configuration information for anomaly detection jobs." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}", - "methods":[ - "GET" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The ID of the jobs to fetch" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - }, - { - "path":"/_xpack/ml/anomaly_detectors", - "methods":[ - "GET" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "allow_no_match":{ - "type":"boolean", - "required":false, - "description":"Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified)" - }, - "allow_no_jobs":{ - "type":"boolean", - "required":false, - "description":"Whether to ignore if a wildcard expression matches no jobs. 
(This includes `_all` string or when no jobs have been specified)", - "deprecated":true - }, - "exclude_generated": { - "required": false, - "type": "boolean", - "default": false, - "description": "Omits fields that are illegal to set on job PUT" - } - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_model_snapshots.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_model_snapshots.json deleted file mode 100644 index 44120f655960e..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_model_snapshots.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "xpack-ml.get_model_snapshots":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-snapshot.html", - "description":"Retrieves information about model snapshots." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/model_snapshots/{snapshot_id}", - "methods":[ - "GET", - "POST" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The ID of the job to fetch" - }, - "snapshot_id":{ - "type":"string", - "description":"The ID of the snapshot to fetch" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - }, - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/model_snapshots", - "methods":[ - "GET", - "POST" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The ID of the job to fetch" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "from":{ - "type":"int", - "description":"Skips a number of documents" - }, - "size":{ - "type":"int", - "description":"The default number of documents returned in queries as a string." - }, - "start":{ - "type":"date", - "description":"The filter 'start' query parameter" - }, - "end":{ - "type":"date", - "description":"The filter 'end' query parameter" - }, - "sort":{ - "type":"string", - "description":"Name of the field to sort on" - }, - "desc":{ - "type":"boolean", - "description":"True if the results should be sorted in descending order" - } - }, - "body":{ - "description":"Model snapshot selection criteria" - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_overall_buckets.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_overall_buckets.json deleted file mode 100644 index d18e0c1be3dfa..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_overall_buckets.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "xpack-ml.get_overall_buckets":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-overall-buckets.html", - "description":"Retrieves overall bucket results that summarize the bucket results of multiple anomaly detection jobs." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/results/overall_buckets", - "methods":[ - "GET", - "POST" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The job IDs for which to calculate overall bucket results" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "top_n":{ - "type":"int", - "description":"The number of top job bucket scores to be used in the overall_score calculation" - }, - "bucket_span":{ - "type":"string", - "description":"The span of the overall buckets. Defaults to the longest job bucket_span" - }, - "overall_score":{ - "type":"double", - "description":"Returns overall buckets with overall scores higher than this value" - }, - "exclude_interim":{ - "type":"boolean", - "description":"If true overall buckets that include interim buckets will be excluded" - }, - "start":{ - "type":"string", - "description":"Returns overall buckets with timestamps after this time" - }, - "end":{ - "type":"string", - "description":"Returns overall buckets with timestamps earlier than this time" - }, - "allow_no_match":{ - "type":"boolean", - "description":"Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified)" - }, - "allow_no_jobs":{ - "type":"boolean", - "description":"Whether to ignore if a wildcard expression matches no jobs. (This includes `_all` string or when no jobs have been specified)", - "deprecated":true - } - }, - "body":{ - "description":"Overall bucket selection details if not provided in URI" - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_records.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_records.json deleted file mode 100644 index a930001a0e319..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.get_records.json +++ /dev/null @@ -1,72 +0,0 @@ -{ - "xpack-ml.get_records":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-record.html", - "description":"Retrieves anomaly records for an anomaly detection job." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/results/records", - "methods":[ - "GET", - "POST" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The ID of the job" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "exclude_interim":{ - "type":"boolean", - "description":"Exclude interim results" - }, - "from":{ - "type":"int", - "description":"skips a number of records" - }, - "size":{ - "type":"int", - "description":"specifies a max number of records to get" - }, - "start":{ - "type":"string", - "description":"Start time filter for records" - }, - "end":{ - "type":"string", - "description":"End time filter for records" - }, - "record_score":{ - "type":"double", - "description":"Returns records with anomaly scores greater or equal than this value" - }, - "sort":{ - "type":"string", - "description":"Sort records by a particular field" - }, - "desc":{ - "type":"boolean", - "description":"Set the sort direction" - } - }, - "body":{ - "description":"Record selection criteria" - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.info.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.info.json deleted file mode 100644 index a21bf7986ac20..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.info.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "xpack-ml.info":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-ml-info.html", - "description":"Returns defaults and limits used by machine learning." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/info", - "methods":[ - "GET" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.open_job.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.open_job.json deleted file mode 100644 index e464c3743695e..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.open_job.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "xpack-ml.open_job":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-open-job.html", - "description":"Opens one or more anomaly detection jobs." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/_open", - "methods":[ - "POST" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The ID of the job to open" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.post_calendar_events.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.post_calendar_events.json deleted file mode 100644 index ca754b25a0d06..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.post_calendar_events.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "xpack-ml.post_calendar_events":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-post-calendar-event.html", - "description":"Posts scheduled events in a calendar." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/calendars/{calendar_id}/events", - "methods":[ - "POST" - ], - "parts":{ - "calendar_id":{ - "type":"string", - "description":"The ID of the calendar to modify" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "body":{ - "description":"A list of events", - "required":true - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.post_data.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.post_data.json deleted file mode 100644 index 21d4687e54119..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.post_data.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "xpack-ml.post_data":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-post-data.html", - "description":"Sends data to an anomaly detection job for analysis." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/x-ndjson", "application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/_data", - "methods":[ - "POST" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The name of the job receiving the data" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "reset_start":{ - "type":"string", - "description":"Optional parameter to specify the start of the bucket resetting range" - }, - "reset_end":{ - "type":"string", - "description":"Optional parameter to specify the end of the bucket resetting range" - } - }, - "body":{ - "description":"The data to process", - "required":true, - "serialize":"bulk" - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.preview_datafeed.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.preview_datafeed.json deleted file mode 100644 index c14869ff12426..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.preview_datafeed.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "xpack-ml.preview_datafeed":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-preview-datafeed.html", - "description":"Previews a datafeed." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/datafeeds/{datafeed_id}/_preview", - "methods":[ - "GET", - "POST" - ], - "parts":{ - "datafeed_id":{ - "type":"string", - "description":"The ID of the datafeed to preview" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - }, - { - "path":"/_xpack/ml/datafeeds/_preview", - "methods":[ - "GET", - "POST" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "body":{ - "description":"The datafeed config and job config with which to execute the preview", - "required":false - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.put_calendar.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.put_calendar.json deleted file mode 100644 index d44674646e71d..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.put_calendar.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "xpack-ml.put_calendar":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-calendar.html", - "description":"Instantiates a calendar." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/calendars/{calendar_id}", - "methods":[ - "PUT" - ], - "parts":{ - "calendar_id":{ - "type":"string", - "description":"The ID of the calendar to create" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "body":{ - "description":"The calendar details", - "required":false - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.put_calendar_job.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.put_calendar_job.json deleted file mode 100644 index 3351ba74cf961..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.put_calendar_job.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "xpack-ml.put_calendar_job":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-calendar-job.html", - "description":"Adds an anomaly detection job to a calendar." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/calendars/{calendar_id}/jobs/{job_id}", - "methods":[ - "PUT" - ], - "parts":{ - "calendar_id":{ - "type":"string", - "description":"The ID of the calendar to modify" - }, - "job_id":{ - "type":"string", - "description":"The ID of the job to add to the calendar" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.put_datafeed.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.put_datafeed.json deleted file mode 100644 index 3e2700899214b..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.put_datafeed.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "xpack-ml.put_datafeed":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-datafeed.html", - "description":"Instantiates a datafeed." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/datafeeds/{datafeed_id}", - "methods":[ - "PUT" - ], - "parts":{ - "datafeed_id":{ - "type":"string", - "description":"The ID of the datafeed to create" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "body":{ - "description":"The datafeed config", - "required":true - }, - "params":{ - "ignore_unavailable":{ - "type":"boolean", - "description":"Ignore unavailable indexes (default: false)" - }, - "allow_no_indices":{ - "type":"boolean", - "description":"Ignore if the source indices expressions resolves to no concrete indices (default: true)" - }, - "ignore_throttled":{ - "type":"boolean", - "description":"Ignore indices that are marked as throttled (default: true)" - }, - "expand_wildcards":{ - "type":"enum", - "options":[ - "open", - "closed", - "hidden", - "none", - "all" - ], - "description":"Whether source index expressions should get expanded to open or closed indices (default: open)" - } - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.put_filter.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.put_filter.json deleted file mode 100644 index a36f748bb6bb5..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.put_filter.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "xpack-ml.put_filter":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-filter.html", - "description":"Instantiates a filter." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/filters/{filter_id}", - "methods":[ - "PUT" - ], - "parts":{ - "filter_id":{ - "type":"string", - "description":"The ID of the filter to create" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "body":{ - "description":"The filter details", - "required":true - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.put_job.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.put_job.json deleted file mode 100644 index 3c1798170969c..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.put_job.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "xpack-ml.put_job":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-job.html", - "description":"Instantiates an anomaly detection job." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}", - "methods":[ - "PUT" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The ID of the job to create" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "ignore_unavailable":{ - "type":"boolean", - "description":"Ignore unavailable indexes (default: false). Only set if datafeed_config is provided." - }, - "allow_no_indices":{ - "type":"boolean", - "description":"Ignore if the source indices expressions resolves to no concrete indices (default: true). Only set if datafeed_config is provided." - }, - "ignore_throttled":{ - "type":"boolean", - "description":"Ignore indices that are marked as throttled (default: true). Only set if datafeed_config is provided." - }, - "expand_wildcards":{ - "type":"enum", - "options":[ - "open", - "closed", - "hidden", - "none", - "all" - ], - "description":"Whether source index expressions should get expanded to open or closed indices (default: open). Only set if datafeed_config is provided." - } - }, - "body":{ - "description":"The job", - "required":true - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.revert_model_snapshot.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.revert_model_snapshot.json deleted file mode 100644 index 803595f9ec0b2..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.revert_model_snapshot.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "xpack-ml.revert_model_snapshot":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-revert-snapshot.html", - "description":"Reverts to a specific snapshot." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/model_snapshots/{snapshot_id}/_revert", - "methods":[ - "POST" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The ID of the job to fetch" - }, - "snapshot_id":{ - "type":"string", - "description":"The ID of the snapshot to revert to" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "delete_intervening_results":{ - "type":"boolean", - "description":"Should we reset the results back to the time of the snapshot?" 
- } - }, - "body":{ - "description":"Reversion options" - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.set_upgrade_mode.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.set_upgrade_mode.json deleted file mode 100644 index 1967d8a6262fd..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.set_upgrade_mode.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "xpack-ml.set_upgrade_mode":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-set-upgrade-mode.html", - "description":"Sets a cluster wide upgrade_mode setting that prepares machine learning indices for an upgrade." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/set_upgrade_mode", - "methods":[ - "POST" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "enabled":{ - "type":"boolean", - "description":"Whether to enable upgrade_mode ML setting or not. Defaults to false." - }, - "timeout":{ - "type":"time", - "description":"Controls the time to wait before action times out. Defaults to 30 seconds" - } - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.start_datafeed.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.start_datafeed.json deleted file mode 100644 index cef069eaa153d..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.start_datafeed.json +++ /dev/null @@ -1,54 +0,0 @@ -{ - "xpack-ml.start_datafeed":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-start-datafeed.html", - "description":"Starts one or more datafeeds." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/datafeeds/{datafeed_id}/_start", - "methods":[ - "POST" - ], - "parts":{ - "datafeed_id":{ - "type":"string", - "description":"The ID of the datafeed to start" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "start":{ - "type":"string", - "required":false, - "description":"The start time from where the datafeed should begin" - }, - "end":{ - "type":"string", - "required":false, - "description":"The end time when the datafeed should stop. When not set, the datafeed continues in real time" - }, - "timeout":{ - "type":"time", - "required":false, - "description":"Controls the time to wait until a datafeed has started. 
Default to 20 seconds" - } - }, - "body":{ - "description":"The start datafeed parameters" - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.stop_datafeed.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.stop_datafeed.json deleted file mode 100644 index b09d536f05b36..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.stop_datafeed.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "xpack-ml.stop_datafeed":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-stop-datafeed.html", - "description":"Stops one or more datafeeds." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/datafeeds/{datafeed_id}/_stop", - "methods":[ - "POST" - ], - "parts":{ - "datafeed_id":{ - "type":"string", - "description":"The ID of the datafeed to stop" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "allow_no_match":{ - "type":"boolean", - "required":false, - "description":"Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified)" - }, - "allow_no_datafeeds":{ - "type":"boolean", - "required":false, - "description":"Whether to ignore if a wildcard expression matches no datafeeds. (This includes `_all` string or when no datafeeds have been specified)", - "deprecated":true - }, - "force":{ - "type":"boolean", - "required":false, - "description":"True if the datafeed should be forcefully stopped." - }, - "timeout":{ - "type":"time", - "required":false, - "description":"Controls the time to wait until a datafeed has stopped. Default to 20 seconds" - } - }, - "body":{ - "description":"The URL params optionally sent in the body", - "required":false - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.update_datafeed.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.update_datafeed.json deleted file mode 100644 index ccddff3dd011b..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.update_datafeed.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "xpack-ml.update_datafeed":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-datafeed.html", - "description":"Updates certain properties of a datafeed." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/datafeeds/{datafeed_id}/_update", - "methods":[ - "POST" - ], - "parts":{ - "datafeed_id":{ - "type":"string", - "description":"The ID of the datafeed to update" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "body":{ - "description":"The datafeed update settings", - "required":true - }, - "params":{ - "ignore_unavailable":{ - "type":"boolean", - "description":"Ignore unavailable indexes (default: false)" - }, - "allow_no_indices":{ - "type":"boolean", - "description":"Ignore if the source indices expressions resolves to no concrete indices (default: true)" - }, - "ignore_throttled":{ - "type":"boolean", - "description":"Ignore indices that are marked as throttled (default: true)" - }, - "expand_wildcards":{ - "type":"enum", - "options":[ - "open", - "closed", - "hidden", - "none", - "all" - ], - "description":"Whether source index expressions should get expanded to open or closed indices (default: open)" - } - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.update_filter.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.update_filter.json deleted file mode 100644 index 00e58675e0dbe..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.update_filter.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "xpack-ml.update_filter":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-filter.html", - "description":"Updates the description of a filter, adds items, or removes items." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/filters/{filter_id}/_update", - "methods":[ - "POST" - ], - "parts":{ - "filter_id":{ - "type":"string", - "description":"The ID of the filter to update" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "body":{ - "description":"The filter update", - "required":true - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.update_job.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.update_job.json deleted file mode 100644 index 62e79020ac764..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.update_job.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "xpack-ml.update_job":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-job.html", - "description":"Updates certain properties of an anomaly detection job." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/_update", - "methods":[ - "POST" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The ID of the job to create" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "body":{ - "description":"The job update settings", - "required":true - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.update_model_snapshot.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.update_model_snapshot.json deleted file mode 100644 index cd72375d6ae4a..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.update_model_snapshot.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "xpack-ml.update_model_snapshot":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-snapshot.html", - "description":"Updates certain properties of a snapshot." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/{job_id}/model_snapshots/{snapshot_id}/_update", - "methods":[ - "POST" - ], - "parts":{ - "job_id":{ - "type":"string", - "description":"The ID of the job to fetch" - }, - "snapshot_id":{ - "type":"string", - "description":"The ID of the snapshot to update" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{}, - "body":{ - "description":"The model snapshot properties to update", - "required":true - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.validate.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.validate.json deleted file mode 100644 index ad337c3c1ad82..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.validate.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "xpack-ml.validate":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/machine-learning/current/ml-jobs.html", - "description":"Validates an anomaly detection job." 
- }, - "stability":"stable", - "visibility":"private", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/_validate", - "methods":[ - "POST" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{}, - "body":{ - "description":"The job config", - "required":true - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.validate_detector.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.validate_detector.json deleted file mode 100644 index 5a06df8977dfc..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ml.validate_detector.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "xpack-ml.validate_detector":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/machine-learning/current/ml-jobs.html", - "description":"Validates an anomaly detection detector." - }, - "stability":"stable", - "visibility":"private", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ml/anomaly_detectors/_validate/detector", - "methods":[ - "POST" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{}, - "body":{ - "description":"The detector", - "required":true - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-monitoring.bulk.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-monitoring.bulk.json deleted file mode 100644 index bf37993fb7862..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-monitoring.bulk.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "xpack-monitoring.bulk":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/monitor-elasticsearch-cluster.html", - "description":"Used by the monitoring features to send monitoring data." 
- }, - "stability":"experimental", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/x-ndjson;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/monitoring/_bulk", - "methods":[ - "POST", - "PUT" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "system_id":{ - "type":"string", - "description":"Identifier of the monitored system" - }, - "system_api_version":{ - "type":"string", - "description":"API Version of the monitored system" - }, - "interval":{ - "type":"string", - "description":"Collection interval (e.g., '10s' or '10000ms') of the payload" - } - }, - "body":{ - "description":"The operation definition and data (action-data pairs), separated by newlines", - "required":true, - "serialize":"bulk" - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.delete_job.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.delete_job.json deleted file mode 100644 index 3c23fddd7e4fa..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.delete_job.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "xpack-rollup.delete_job":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-delete-job.html", - "description":"Deletes an existing rollup job." - }, - "stability":"experimental", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/rollup/job/{id}", - "methods":[ - "DELETE" - ], - "parts":{ - "id":{ - "type":"string", - "description":"The ID of the job to delete" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.get_jobs.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.get_jobs.json deleted file mode 100644 index 6b98664cf17ee..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.get_jobs.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "xpack-rollup.get_jobs":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-get-job.html", - "description":"Retrieves the configuration, stats, and status of rollup jobs." - }, - "stability":"experimental", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/rollup/job/{id}", - "methods":[ - "GET" - ], - "parts":{ - "id":{ - "type":"string", - "description":"The ID of the job(s) to fetch. 
Accepts glob patterns, or left blank for all jobs" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - }, - { - "path":"/_xpack/rollup/job/", - "methods":[ - "GET" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.get_rollup_caps.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.get_rollup_caps.json deleted file mode 100644 index a8b95d00b6c3b..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.get_rollup_caps.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "xpack-rollup.get_rollup_caps":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-get-rollup-caps.html", - "description":"Returns the capabilities of any rollup jobs that have been configured for a specific index or index pattern." - }, - "stability":"experimental", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/rollup/data/{id}", - "methods":[ - "GET" - ], - "parts":{ - "id":{ - "type":"string", - "description":"The ID of the index to check rollup capabilities on, or left blank for all jobs" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - }, - { - "path":"/_xpack/rollup/data/", - "methods":[ - "GET" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.get_rollup_index_caps.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.get_rollup_index_caps.json deleted file mode 100644 index 38c9af891addb..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.get_rollup_index_caps.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "xpack-rollup.get_rollup_index_caps":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-get-rollup-index-caps.html", - "description":"Returns the rollup capabilities of all jobs inside of a rollup index (e.g. the index where rollup data is stored)." - }, - "stability":"experimental", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/{index}/_xpack/rollup/data", - "methods":[ - "GET" - ], - "parts":{ - "index":{ - "type":"string", - "description":"The rollup index or index pattern to obtain rollup capabilities from." 
- } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.put_job.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.put_job.json deleted file mode 100644 index 865a6f448193f..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.put_job.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "xpack-rollup.put_job":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-put-job.html", - "description":"Creates a rollup job." - }, - "stability":"experimental", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/rollup/job/{id}", - "methods":[ - "PUT" - ], - "parts":{ - "id":{ - "type":"string", - "description":"The ID of the job to create" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "body":{ - "description":"The job configuration", - "required":true - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.start_job.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.start_job.json deleted file mode 100644 index aebaf859ce9e2..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.start_job.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "xpack-rollup.start_job":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-start-job.html", - "description":"Starts an existing, stopped rollup job." - }, - "stability":"experimental", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/rollup/job/{id}/_start", - "methods":[ - "POST" - ], - "parts":{ - "id":{ - "type":"string", - "description":"The ID of the job to start" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.stop_job.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.stop_job.json deleted file mode 100644 index 62eada2313f2a..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-rollup.stop_job.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "xpack-rollup.stop_job":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/rollup-stop-job.html", - "description":"Stops an existing, started rollup job." 
- }, - "stability":"experimental", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/rollup/job/{id}/_stop", - "methods":[ - "POST" - ], - "parts":{ - "id":{ - "type":"string", - "description":"The ID of the job to stop" - } - }, - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{ - "wait_for_completion":{ - "type":"boolean", - "required":false, - "description":"True if the API should block until the job has fully stopped, false if should be executed async. Defaults to false." - }, - "timeout":{ - "type":"time", - "required":false, - "description":"Block for (at maximum) the specified duration while waiting for the job to stop. Defaults to 30s." - } - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-sql.clear_cursor.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-sql.clear_cursor.json deleted file mode 100644 index ee706fc0736cd..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-sql.clear_cursor.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "xpack-sql.clear_cursor":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/clear-sql-cursor-api.html", - "description":"Clears the SQL cursor" - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/sql/close", - "methods":[ - "POST" - ] - } - ] - }, - "body":{ - "description":"Specify the cursor value in the `cursor` element to clean the cursor.", - "required":true - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-sql.query.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-sql.query.json deleted file mode 100644 index d153e0f3484c3..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-sql.query.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "xpack-sql.query":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-search-api.html", - "description":"Executes a SQL request" - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/sql", - "methods":[ - "POST", - "GET" - ] - } - ] - }, - "params":{ - "format":{ - "type":"string", - "description":"a short version of the Accept header, e.g. json, yaml" - } - }, - "body":{ - "description":"Use the `query` element to start a query. 
Use the `cursor` element to continue a query.", - "required":true - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-sql.translate.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-sql.translate.json deleted file mode 100644 index 1fad4a643dbb9..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-sql.translate.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "xpack-sql.translate":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-translate-api.html", - "description":"Translates SQL into Elasticsearch queries" - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/vnd.elasticsearch+json;compatible-with=7"], - "content_type": ["application/vnd.elasticsearch+json;compatible-with=7"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/sql/translate", - "methods":[ - "POST", - "GET" - ] - } - ] - }, - "params":{}, - "body":{ - "description":"Specify the query in the `query` element.", - "required":true - } - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ssl.certificates.json b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ssl.certificates.json deleted file mode 100644 index 7d25b0bf8f4f3..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/api/xpack-ssl.certificates.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "xpack-ssl.certificates":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-ssl.html", - "description":"Retrieves information about the X.509 certificates used to encrypt communications in the cluster." - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_xpack/ssl/certificates", - "methods":[ - "GET" - ], - "deprecated":{ - "version":"7.0.0", - "description":"all _xpack prefix have been deprecated" - } - } - ] - }, - "params":{} - } -} diff --git a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/test/monitoring.bulk/10_basic_compat.yml b/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/test/monitoring.bulk/10_basic_compat.yml deleted file mode 100644 index a88f67b394783..0000000000000 --- a/x-pack/qa/xpack-prefix-rest-compat/src/yamlRestTestV7Compat/resources/rest-api-spec/test/monitoring.bulk/10_basic_compat.yml +++ /dev/null @@ -1,31 +0,0 @@ ---- -setup: - - requires: - test_runner_features: - - "headers" - - "warnings_regex" - ---- -"Bulk indexing of monitoring data": - - - do: - headers: - Content-Type: "application/vnd.elasticsearch+json;compatible-with=7" - Accept: "application/vnd.elasticsearch+json;compatible-with=7" - xpack-monitoring.bulk: - system_id: "kibana" - system_api_version: "6" - interval: "10s" - body: - - index: - _type: test_type - - avg-cpu: - user: 13.26 - nice: 0.17 - system: 1.51 - iowait: 0.85 - idle: 84.20 - warnings_regex: - - "\\[.* /_xpack/monitoring/_bulk\\] is deprecated! Use \\[.* /_monitoring/bulk\\] instead." 
- - is_false: errors

From ca2b144ba9d2237b0a2d88c82391e6e0b470e7ef Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Wed, 11 Sep 2024 15:40:40 -0400
Subject: [PATCH 35/58] ESQL: Compute support for filtering ungrouped aggs
 (#112717)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Adds support to the compute engine for filtering which positions are
processed by ungrouped aggs. This should allow syntax like:
```
| STATS success = COUNT(*) WHERE 200 <= response_code AND response_code < 300,
        redirect = COUNT(*) WHERE 300 <= response_code AND response_code < 400,
        client_err = COUNT(*) WHERE 400 <= response_code AND response_code < 500,
        server_err = COUNT(*) WHERE 500 <= response_code AND response_code < 600,
        total_count = COUNT(*)
```
We could translate the WHERE expression into an `ExpressionEvaluator` and
run it, then plug it into the filtering support added in this PR.

The actual filtering is done by creating a `FilteredAggregatorFunction`
which wraps a regular `AggregatorFunction`, first executing the filter
against the incoming `Page` and then passing the resulting mask to the
wrapped `AggregatorFunction`.

We've then added a `mask` to `AggregatorFunction#addRawInput` which each
aggregation function must use for filtering. We keep the unfiltered
behavior by sending a constant block with `true` in it. Each agg detects
this and takes an "unfiltered" path, preserving the original performance.

Importantly, when you don't turn this on it doesn't affect performance:
```
  (blockType)  (grouping)   (op)  Score   Error  ->  Score   Error  Units
 vector_longs        none  count  0.007 ± 0.001  ->  0.007 ± 0.001  ns/op
 vector_longs        none    min  0.123 ± 0.004  ->  0.128 ± 0.005  ns/op
 vector_longs       longs  count  4.311 ± 0.192  ->  4.218 ± 0.053  ns/op
 vector_longs       longs    min  5.476 ± 0.077  ->  5.451 ± 0.074  ns/op
```
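To make the wiring concrete, here is a minimal sketch of the wrapper
pattern described above, built against the interface methods visible in
the diff below. It is not the code from this patch: it assumes the filter
expression evaluates to a plain single-valued, non-null `BooleanVector`,
and that the incoming mask is the constant-true "no masking" vector; the
real `FilteredAggregatorFunction` added here also has to deal with null
and multivalued filter results.
```
package org.elasticsearch.compute.aggregation;

import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.BooleanBlock;
import org.elasticsearch.compute.data.BooleanVector;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.DriverContext;
import org.elasticsearch.compute.operator.EvalOperator;
import org.elasticsearch.core.Releasables;

/**
 * Sketch of a filtering wrapper: evaluates a filter expression against each
 * incoming page and hands the result to the wrapped aggregator as its mask.
 * Hypothetical name; see FilteredAggregatorFunction for the real thing.
 */
record SketchFilteredAggregatorFunction(AggregatorFunction next, EvalOperator.ExpressionEvaluator filter)
    implements AggregatorFunction {

    @Override
    public void addRawInput(Page page, BooleanVector mask) {
        // `mask` is assumed to be the constant-true vector here; a production
        // version would combine it with the filter result (or reject it).
        // Evaluate the WHERE expression; the result becomes the mask for `next`.
        try (BooleanBlock filterResult = (BooleanBlock) filter.eval(page)) {
            BooleanVector vector = filterResult.asVector();
            if (vector == null) {
                // Nulls or multivalued positions in the filter result; the
                // sketch doesn't handle those.
                throw new UnsupportedOperationException("sketch assumes single-valued, non-null filter results");
            }
            next.addRawInput(page, vector);
        }
    }

    @Override
    public void addIntermediateInput(Page page) {
        // Intermediate states were already filtered when they were built.
        next.addIntermediateInput(page);
    }

    @Override
    public int intermediateBlockCount() {
        return next.intermediateBlockCount();
    }

    @Override
    public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
        next.evaluateIntermediate(blocks, offset, driverContext);
    }

    @Override
    public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) {
        next.evaluateFinal(blocks, offset, driverContext);
    }

    @Override
    public void close() {
        Releasables.close(next, filter);
    }
}
```
Built this way, only aggs that declare a WHERE pay for filter evaluation
(via `FilteredAggregatorFunctionSupplier`); everything else keeps
receiving the constant-true mask, so the generated fast paths shown in
the benchmark above are untouched.
---
 .../compute/gen/AggregatorImplementer.java | 51 ++++++++--
 ...ountDistinctBooleanAggregatorFunction.java | 47 ++++++++-
 ...untDistinctBytesRefAggregatorFunction.java | 50 +++++++++-
 ...CountDistinctDoubleAggregatorFunction.java | 48 +++++++++-
 .../CountDistinctFloatAggregatorFunction.java | 48 +++++++++-
 .../CountDistinctIntAggregatorFunction.java | 48 +++++++++-
 .../CountDistinctLongAggregatorFunction.java | 48 +++++++++-
 .../MaxBooleanAggregatorFunction.java | 49 +++++++++-
 .../MaxBytesRefAggregatorFunction.java | 49 +++++++++-
 .../MaxDoubleAggregatorFunction.java | 49 +++++++++-
 .../MaxFloatAggregatorFunction.java | 49 +++++++++-
 .../aggregation/MaxIntAggregatorFunction.java | 49 +++++++++-
 .../aggregation/MaxIpAggregatorFunction.java | 49 +++++++++-
 .../MaxLongAggregatorFunction.java | 49 +++++++++-
 ...luteDeviationDoubleAggregatorFunction.java | 48 +++++++++-
 ...oluteDeviationFloatAggregatorFunction.java | 48 +++++++++-
 ...bsoluteDeviationIntAggregatorFunction.java | 48 +++++++++-
 ...soluteDeviationLongAggregatorFunction.java | 48 +++++++++-
 .../MinBooleanAggregatorFunction.java | 49 +++++++++-
 .../MinBytesRefAggregatorFunction.java | 49 +++++++++-
 .../MinDoubleAggregatorFunction.java | 49 +++++++++-
 .../MinFloatAggregatorFunction.java | 49 +++++++++-
 .../aggregation/MinIntAggregatorFunction.java | 49 +++++++++-
 .../aggregation/MinIpAggregatorFunction.java | 49 +++++++++-
 .../MinLongAggregatorFunction.java | 49 +++++++++-
 .../PercentileDoubleAggregatorFunction.java | 48 +++++++++-
 .../PercentileFloatAggregatorFunction.java | 48 +++++++++-
 .../PercentileIntAggregatorFunction.java | 48 +++++++++-
 .../PercentileLongAggregatorFunction.java | 48 +++++++++-
 .../SumDoubleAggregatorFunction.java |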
49 +++++++++- .../SumFloatAggregatorFunction.java | 49 +++++++++- .../aggregation/SumIntAggregatorFunction.java | 49 +++++++++- .../SumLongAggregatorFunction.java | 49 +++++++++- .../TopBooleanAggregatorFunction.java | 47 ++++++++- .../TopDoubleAggregatorFunction.java | 48 +++++++++- .../TopFloatAggregatorFunction.java | 48 +++++++++- .../aggregation/TopIntAggregatorFunction.java | 48 +++++++++- .../aggregation/TopIpAggregatorFunction.java | 50 +++++++++- .../TopLongAggregatorFunction.java | 48 +++++++++- .../ValuesBooleanAggregatorFunction.java | 47 ++++++++- .../ValuesBytesRefAggregatorFunction.java | 50 +++++++++- .../ValuesDoubleAggregatorFunction.java | 48 +++++++++- .../ValuesFloatAggregatorFunction.java | 48 +++++++++- .../ValuesIntAggregatorFunction.java | 48 +++++++++- .../ValuesLongAggregatorFunction.java | 48 +++++++++- ...esianPointDocValuesAggregatorFunction.java | 48 +++++++++- ...anPointSourceValuesAggregatorFunction.java | 50 +++++++++- ...idGeoPointDocValuesAggregatorFunction.java | 48 +++++++++- ...eoPointSourceValuesAggregatorFunction.java | 50 +++++++++- .../compute/aggregation/Aggregator.java | 11 ++- .../aggregation/AggregatorFunction.java | 31 +++++- .../aggregation/CountAggregatorFunction.java | 30 +++++- .../FilteredAggregatorFunction.java | 63 ++++++++++++ .../FilteredAggregatorFunctionSupplier.java | 12 ++- .../FilteredGroupingAggregatorFunction.java | 5 + .../FromPartialAggregatorFunction.java | 6 +- .../ToPartialAggregatorFunction.java | 5 +- .../compute/operator/AggregationOperator.java | 5 +- .../AggregatorFunctionTestCase.java | 10 +- .../FilteredAggregatorFunctionTests.java | 96 +++++++++++++++++++ ...lteredGroupingAggregatorFunctionTests.java | 2 +- .../compute/data/BlockSerializationTests.java | 6 +- .../function/AbstractAggregationTestCase.java | 17 ++-- 63 files changed, 2500 insertions(+), 179 deletions(-) create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunction.java create mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionTests.java diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java index 67ce0cf709704..48269e7e2af9b 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java @@ -240,8 +240,10 @@ private TypeSpec type() { builder.addMethod(intermediateStateDesc()); builder.addMethod(intermediateBlockCount()); builder.addMethod(addRawInput()); - builder.addMethod(addRawVector()); - builder.addMethod(addRawBlock()); + builder.addMethod(addRawVector(false)); + builder.addMethod(addRawVector(true)); + builder.addMethod(addRawBlock(false)); + builder.addMethod(addRawBlock(true)); builder.addMethod(addIntermediateInput()); builder.addMethod(evaluateIntermediate()); builder.addMethod(evaluateFinal()); @@ -345,22 +347,48 @@ private MethodSpec intermediateBlockCount() { private MethodSpec addRawInput() { MethodSpec.Builder builder = MethodSpec.methodBuilder("addRawInput"); - builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC).addParameter(PAGE, "page"); + builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC).addParameter(PAGE, "page").addParameter(BOOLEAN_VECTOR, "mask"); 
if (stateTypeHasFailed) { builder.beginControlFlow("if (state.failed())"); builder.addStatement("return"); builder.endControlFlow(); } + builder.beginControlFlow("if (mask.isConstant())"); + { + builder.beginControlFlow("if (mask.getBoolean(0) == false)"); + { + builder.addComment("Entire page masked away"); + builder.addStatement("return"); + } + builder.endControlFlow(); + builder.addComment("No masking"); + builder.addStatement("$T block = page.getBlock(channels.get(0))", valueBlockType(init, combine)); + builder.addStatement("$T vector = block.asVector()", valueVectorType(init, combine)); + builder.beginControlFlow("if (vector != null)"); + builder.addStatement("addRawVector(vector)"); + builder.nextControlFlow("else"); + builder.addStatement("addRawBlock(block)"); + builder.endControlFlow(); + builder.addStatement("return"); + } + builder.endControlFlow(); + builder.addComment("Some positions masked away, others kept"); builder.addStatement("$T block = page.getBlock(channels.get(0))", valueBlockType(init, combine)); builder.addStatement("$T vector = block.asVector()", valueVectorType(init, combine)); - builder.beginControlFlow("if (vector != null)").addStatement("addRawVector(vector)"); - builder.nextControlFlow("else").addStatement("addRawBlock(block)").endControlFlow(); + builder.beginControlFlow("if (vector != null)"); + builder.addStatement("addRawVector(vector, mask)"); + builder.nextControlFlow("else"); + builder.addStatement("addRawBlock(block, mask)"); + builder.endControlFlow(); return builder.build(); } - private MethodSpec addRawVector() { + private MethodSpec addRawVector(boolean masked) { MethodSpec.Builder builder = MethodSpec.methodBuilder("addRawVector"); builder.addModifiers(Modifier.PRIVATE).addParameter(valueVectorType(init, combine), "vector"); + if (masked) { + builder.addParameter(BOOLEAN_VECTOR, "mask"); + } if (stateTypeHasSeen) { builder.addStatement("state.seen(true)"); @@ -372,6 +400,9 @@ private MethodSpec addRawVector() { builder.beginControlFlow("for (int i = 0; i < vector.getPositionCount(); i++)"); { + if (masked) { + builder.beginControlFlow("if (mask.getBoolean(i) == false)").addStatement("continue").endControlFlow(); + } combineRawInput(builder, "vector"); } builder.endControlFlow(); @@ -381,9 +412,12 @@ private MethodSpec addRawVector() { return builder.build(); } - private MethodSpec addRawBlock() { + private MethodSpec addRawBlock(boolean masked) { MethodSpec.Builder builder = MethodSpec.methodBuilder("addRawBlock"); builder.addModifiers(Modifier.PRIVATE).addParameter(valueBlockType(init, combine), "block"); + if (masked) { + builder.addParameter(BOOLEAN_VECTOR, "mask"); + } if (valuesIsBytesRef) { // Add bytes_ref scratch var that will only be used for bytes_ref blocks/vectors @@ -391,6 +425,9 @@ private MethodSpec addRawBlock() { } builder.beginControlFlow("for (int p = 0; p < block.getPositionCount(); p++)"); { + if (masked) { + builder.beginControlFlow("if (mask.getBoolean(p) == false)").addStatement("continue").endControlFlow(); + } builder.beginControlFlow("if (block.isNull(p))"); builder.addStatement("continue"); builder.endControlFlow(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java index e9b4498d50265..37543714717de 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java @@ -53,13 +53,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + BooleanBlock block = page.getBlock(channels.get(0)); + BooleanVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept BooleanBlock block = page.getBlock(channels.get(0)); BooleanVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -69,6 +85,15 @@ private void addRawVector(BooleanVector vector) { } } + private void addRawVector(BooleanVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + CountDistinctBooleanAggregator.combine(state, vector.getBoolean(i)); + } + } + private void addRawBlock(BooleanBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -82,6 +107,22 @@ private void addRawBlock(BooleanBlock block) { } } + private void addRawBlock(BooleanBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + CountDistinctBooleanAggregator.combine(state, block.getBoolean(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java index 83917ba218285..77d7e88cf9a93 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java @@ -11,6 +11,7 @@ import java.util.List; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.ElementType; @@ -56,13 +57,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept 
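As the CountDistinctBytesRef variant here shows, the BytesRef flavours differ from the primitive ones only in the scratch buffer: `getBytesRef(i, scratch)` fills a caller-supplied `BytesRef` instead of allocating one per value, so each masked loop pays for a single scratch object up front. A minimal stand-alone illustration of that reuse pattern, using a plain Lucene `BytesRef` and arrays rather than the compute-engine block types:

```java
import org.apache.lucene.util.BytesRef;

final class ScratchReuseSketch {
    // Models vector.getBytesRef(i, scratch): point the caller's scratch at the
    // i-th value and return it, allocating nothing per element.
    static BytesRef get(byte[][] values, int i, BytesRef scratch) {
        scratch.bytes = values[i];
        scratch.offset = 0;
        scratch.length = values[i].length;
        return scratch;
    }

    // Masked loop in the style of the generated addRawVector(vector, mask).
    static int totalKeptLength(byte[][] values, boolean[] mask) {
        BytesRef scratch = new BytesRef(); // one allocation for the whole loop
        int total = 0;
        for (int i = 0; i < values.length; i++) {
            if (mask[i] == false) {
                continue; // position masked away
            }
            total += get(values, i, scratch).length;
        }
        return total;
    }
}
```

The generated variants below continue the same pattern for the remaining value types.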
BytesRefBlock block = page.getBlock(channels.get(0)); BytesRefVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -73,6 +90,16 @@ private void addRawVector(BytesRefVector vector) { } } + private void addRawVector(BytesRefVector vector, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + CountDistinctBytesRefAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + private void addRawBlock(BytesRefBlock block) { BytesRef scratch = new BytesRef(); for (int p = 0; p < block.getPositionCount(); p++) { @@ -87,6 +114,23 @@ private void addRawBlock(BytesRefBlock block) { } } + private void addRawBlock(BytesRefBlock block, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + CountDistinctBytesRefAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java index c720df313bd99..4f0604b4f03c4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java @@ -11,6 +11,7 @@ import java.util.List; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DoubleBlock; @@ -58,13 +59,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + DoubleBlock block = page.getBlock(channels.get(0)); + DoubleVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept DoubleBlock block = page.getBlock(channels.get(0)); DoubleVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -74,6 +91,15 @@ private void addRawVector(DoubleVector vector) { } } + private void addRawVector(DoubleVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + CountDistinctDoubleAggregator.combine(state, vector.getDouble(i)); + } + } + private void addRawBlock(DoubleBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -87,6 
+113,22 @@ private void addRawBlock(DoubleBlock block) { } } + private void addRawBlock(DoubleBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + CountDistinctDoubleAggregator.combine(state, block.getDouble(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunction.java index aad616eac95a1..00e5335138aa9 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunction.java @@ -11,6 +11,7 @@ import java.util.List; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.ElementType; @@ -58,13 +59,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + FloatBlock block = page.getBlock(channels.get(0)); + FloatVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept FloatBlock block = page.getBlock(channels.get(0)); FloatVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -74,6 +91,15 @@ private void addRawVector(FloatVector vector) { } } + private void addRawVector(FloatVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + CountDistinctFloatAggregator.combine(state, vector.getFloat(i)); + } + } + private void addRawBlock(FloatBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -87,6 +113,22 @@ private void addRawBlock(FloatBlock block) { } } + private void addRawBlock(FloatBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + CountDistinctFloatAggregator.combine(state, block.getFloat(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java index 083c483a9c9bc..90b4947b77d92 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java @@ -11,6 +11,7 @@ import java.util.List; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.ElementType; @@ -58,13 +59,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + IntBlock block = page.getBlock(channels.get(0)); + IntVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept IntBlock block = page.getBlock(channels.get(0)); IntVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -74,6 +91,15 @@ private void addRawVector(IntVector vector) { } } + private void addRawVector(IntVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + CountDistinctIntAggregator.combine(state, vector.getInt(i)); + } + } + private void addRawBlock(IntBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -87,6 +113,22 @@ private void addRawBlock(IntBlock block) { } } + private void addRawBlock(IntBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + CountDistinctIntAggregator.combine(state, block.getInt(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java index ee6fb5b470442..99dc37d58a88c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java @@ -11,6 +11,7 @@ import java.util.List; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.ElementType; @@ -58,13 +59,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page 
page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept LongBlock block = page.getBlock(channels.get(0)); LongVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -74,6 +91,15 @@ private void addRawVector(LongVector vector) { } } + private void addRawVector(LongVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + CountDistinctLongAggregator.combine(state, vector.getLong(i)); + } + } + private void addRawBlock(LongBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -87,6 +113,22 @@ private void addRawBlock(LongBlock block) { } } + private void addRawBlock(LongBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + CountDistinctLongAggregator.combine(state, block.getLong(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunction.java index 2ffbcfc2d9458..38de18bea776a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBooleanAggregatorFunction.java @@ -53,13 +53,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + BooleanBlock block = page.getBlock(channels.get(0)); + BooleanVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept BooleanBlock block = page.getBlock(channels.get(0)); BooleanVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -70,6 +86,16 @@ private void addRawVector(BooleanVector vector) { } } + private void addRawVector(BooleanVector vector, BooleanVector mask) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + state.booleanValue(MaxBooleanAggregator.combine(state.booleanValue(), vector.getBoolean(i))); + } + } + private void addRawBlock(BooleanBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -84,6 +110,23 @@ private void 
addRawBlock(BooleanBlock block) { } } + private void addRawBlock(BooleanBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + state.booleanValue(MaxBooleanAggregator.combine(state.booleanValue(), block.getBoolean(i))); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunction.java index 62897c61ea80e..1a4d440d2b8bc 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunction.java @@ -56,13 +56,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept BytesRefBlock block = page.getBlock(channels.get(0)); BytesRefVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -73,6 +89,16 @@ private void addRawVector(BytesRefVector vector) { } } + private void addRawVector(BytesRefVector vector, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + MaxBytesRefAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + private void addRawBlock(BytesRefBlock block) { BytesRef scratch = new BytesRef(); for (int p = 0; p < block.getPositionCount(); p++) { @@ -87,6 +113,23 @@ private void addRawBlock(BytesRefBlock block) { } } + private void addRawBlock(BytesRefBlock block, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + MaxBytesRefAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java index f78a8773ccfcd..266977e2a689c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java @@ -55,13 +55,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + DoubleBlock block = page.getBlock(channels.get(0)); + DoubleVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept DoubleBlock block = page.getBlock(channels.get(0)); DoubleVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -72,6 +88,16 @@ private void addRawVector(DoubleVector vector) { } } + private void addRawVector(DoubleVector vector, BooleanVector mask) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + state.doubleValue(MaxDoubleAggregator.combine(state.doubleValue(), vector.getDouble(i))); + } + } + private void addRawBlock(DoubleBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -86,6 +112,23 @@ private void addRawBlock(DoubleBlock block) { } } + private void addRawBlock(DoubleBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + state.doubleValue(MaxDoubleAggregator.combine(state.doubleValue(), block.getDouble(i))); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunction.java index 0dcef4341727d..3a4dcaa3289fe 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxFloatAggregatorFunction.java @@ -55,13 +55,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + FloatBlock block = page.getBlock(channels.get(0)); + FloatVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept FloatBlock block = page.getBlock(channels.get(0)); FloatVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -72,6 +88,16 @@ private void addRawVector(FloatVector vector) { } } + private void addRawVector(FloatVector vector, BooleanVector mask) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) 
== false) { + continue; + } + state.floatValue(MaxFloatAggregator.combine(state.floatValue(), vector.getFloat(i))); + } + } + private void addRawBlock(FloatBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -86,6 +112,23 @@ private void addRawBlock(FloatBlock block) { } } + private void addRawBlock(FloatBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + state.floatValue(MaxFloatAggregator.combine(state.floatValue(), block.getFloat(i))); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java index 6f83ee7224879..d5c0cea243499 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java @@ -55,13 +55,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + IntBlock block = page.getBlock(channels.get(0)); + IntVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept IntBlock block = page.getBlock(channels.get(0)); IntVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -72,6 +88,16 @@ private void addRawVector(IntVector vector) { } } + private void addRawVector(IntVector vector, BooleanVector mask) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + state.intValue(MaxIntAggregator.combine(state.intValue(), vector.getInt(i))); + } + } + private void addRawBlock(IntBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -86,6 +112,23 @@ private void addRawBlock(IntBlock block) { } } + private void addRawBlock(IntBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + state.intValue(MaxIntAggregator.combine(state.intValue(), block.getInt(i))); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunction.java index 9f714246ea332..13c74775d2796 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIpAggregatorFunction.java @@ -56,13 +56,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept BytesRefBlock block = page.getBlock(channels.get(0)); BytesRefVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -73,6 +89,16 @@ private void addRawVector(BytesRefVector vector) { } } + private void addRawVector(BytesRefVector vector, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + MaxIpAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + private void addRawBlock(BytesRefBlock block) { BytesRef scratch = new BytesRef(); for (int p = 0; p < block.getPositionCount(); p++) { @@ -87,6 +113,23 @@ private void addRawBlock(BytesRefBlock block) { } } + private void addRawBlock(BytesRefBlock block, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + MaxIpAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java index 8826128a68837..d2acff0509dfe 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java @@ -55,13 +55,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept LongBlock block = page.getBlock(channels.get(0)); LongVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -72,6 +88,16 @@ private void addRawVector(LongVector vector) { } } + private void addRawVector(LongVector vector, 
BooleanVector mask) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + state.longValue(MaxLongAggregator.combine(state.longValue(), vector.getLong(i))); + } + } + private void addRawBlock(LongBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -86,6 +112,23 @@ private void addRawBlock(LongBlock block) { } } + private void addRawBlock(LongBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + state.longValue(MaxLongAggregator.combine(state.longValue(), block.getLong(i))); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java index 4bcf08ce0fa35..068bb7a2bf0a5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java @@ -11,6 +11,7 @@ import java.util.List; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DoubleBlock; @@ -55,13 +56,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + DoubleBlock block = page.getBlock(channels.get(0)); + DoubleVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept DoubleBlock block = page.getBlock(channels.get(0)); DoubleVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -71,6 +88,15 @@ private void addRawVector(DoubleVector vector) { } } + private void addRawVector(DoubleVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + MedianAbsoluteDeviationDoubleAggregator.combine(state, vector.getDouble(i)); + } + } + private void addRawBlock(DoubleBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -84,6 +110,22 @@ private void addRawBlock(DoubleBlock block) { } } + private void addRawBlock(DoubleBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + 
for (int i = start; i < end; i++) { + MedianAbsoluteDeviationDoubleAggregator.combine(state, block.getDouble(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunction.java index 38a16859140f8..71cb9964001f3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationFloatAggregatorFunction.java @@ -11,6 +11,7 @@ import java.util.List; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.ElementType; @@ -55,13 +56,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + FloatBlock block = page.getBlock(channels.get(0)); + FloatVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept FloatBlock block = page.getBlock(channels.get(0)); FloatVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -71,6 +88,15 @@ private void addRawVector(FloatVector vector) { } } + private void addRawVector(FloatVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + MedianAbsoluteDeviationFloatAggregator.combine(state, vector.getFloat(i)); + } + } + private void addRawBlock(FloatBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -84,6 +110,22 @@ private void addRawBlock(FloatBlock block) { } } + private void addRawBlock(FloatBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + MedianAbsoluteDeviationFloatAggregator.combine(state, block.getFloat(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java index db9dbdab52244..b4f547fb5b425 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java @@ -11,6 +11,7 @@ import java.util.List; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.ElementType; @@ -55,13 +56,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + IntBlock block = page.getBlock(channels.get(0)); + IntVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept IntBlock block = page.getBlock(channels.get(0)); IntVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -71,6 +88,15 @@ private void addRawVector(IntVector vector) { } } + private void addRawVector(IntVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + MedianAbsoluteDeviationIntAggregator.combine(state, vector.getInt(i)); + } + } + private void addRawBlock(IntBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -84,6 +110,22 @@ private void addRawBlock(IntBlock block) { } } + private void addRawBlock(IntBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + MedianAbsoluteDeviationIntAggregator.combine(state, block.getInt(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java index bf5fd51d7ed17..946e0243cf6f6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java @@ -11,6 +11,7 @@ import java.util.List; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.ElementType; @@ -55,13 +56,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + LongBlock block = 
page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept LongBlock block = page.getBlock(channels.get(0)); LongVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -71,6 +88,15 @@ private void addRawVector(LongVector vector) { } } + private void addRawVector(LongVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + MedianAbsoluteDeviationLongAggregator.combine(state, vector.getLong(i)); + } + } + private void addRawBlock(LongBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -84,6 +110,22 @@ private void addRawBlock(LongBlock block) { } } + private void addRawBlock(LongBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + MedianAbsoluteDeviationLongAggregator.combine(state, block.getLong(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunction.java index 101a6c7f9169a..9bd63ed8efbd8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBooleanAggregatorFunction.java @@ -53,13 +53,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + BooleanBlock block = page.getBlock(channels.get(0)); + BooleanVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept BooleanBlock block = page.getBlock(channels.get(0)); BooleanVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -70,6 +86,16 @@ private void addRawVector(BooleanVector vector) { } } + private void addRawVector(BooleanVector vector, BooleanVector mask) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + state.booleanValue(MinBooleanAggregator.combine(state.booleanValue(), vector.getBoolean(i))); + } + } + private void addRawBlock(BooleanBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -84,6 +110,23 @@ private void addRawBlock(BooleanBlock block) { } } + private void addRawBlock(BooleanBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; 
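One detail worth noting in the Min/Max block variants around here: `state.seen(true)` runs only for positions that survive both the mask and the null check, while the inner loop still combines every value of a kept multi-valued position. A stand-alone model of that behavior, with `long[][]` rows standing in for a multi-valued `LongBlock` (a null row plays the role of `block.isNull(p)`); the null result for an unseen state is an assumption about `evaluateFinal`, which is not shown in this hunk:

```java
final class MinWithSeenSketch {
    private boolean seen;
    private long min = Long.MAX_VALUE;

    void addRawBlock(long[][] rows, boolean[] mask) {
        for (int p = 0; p < rows.length; p++) {
            if (mask[p] == false) {
                continue; // filtered out by the mask
            }
            if (rows[p] == null) {
                continue; // null position, as block.isNull(p)
            }
            seen = true; // only kept, non-null positions mark the state
            for (long v : rows[p]) { // multi-valued positions combine every value
                min = Math.min(min, v);
            }
        }
    }

    Long result() {
        return seen ? min : null; // a fully filtered input stays "unseen"
    }
}
```

This keeps a fully masked column indistinguishable from an all-null one, instead of leaking the type's sentinel default.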
+ } + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + state.booleanValue(MinBooleanAggregator.combine(state.booleanValue(), block.getBoolean(i))); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunction.java index 3346dd762f17f..2789f18a19dfc 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinBytesRefAggregatorFunction.java @@ -56,13 +56,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept BytesRefBlock block = page.getBlock(channels.get(0)); BytesRefVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -73,6 +89,16 @@ private void addRawVector(BytesRefVector vector) { } } + private void addRawVector(BytesRefVector vector, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + MinBytesRefAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + private void addRawBlock(BytesRefBlock block) { BytesRef scratch = new BytesRef(); for (int p = 0; p < block.getPositionCount(); p++) { @@ -87,6 +113,23 @@ private void addRawBlock(BytesRefBlock block) { } } + private void addRawBlock(BytesRefBlock block, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + MinBytesRefAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java index 7d7544e5d8470..e736f91e0b38c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java @@ -55,13 +55,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void 
addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + DoubleBlock block = page.getBlock(channels.get(0)); + DoubleVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept DoubleBlock block = page.getBlock(channels.get(0)); DoubleVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -72,6 +88,16 @@ private void addRawVector(DoubleVector vector) { } } + private void addRawVector(DoubleVector vector, BooleanVector mask) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + state.doubleValue(MinDoubleAggregator.combine(state.doubleValue(), vector.getDouble(i))); + } + } + private void addRawBlock(DoubleBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -86,6 +112,23 @@ private void addRawBlock(DoubleBlock block) { } } + private void addRawBlock(DoubleBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + state.doubleValue(MinDoubleAggregator.combine(state.doubleValue(), block.getDouble(i))); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunction.java index ecabcbdcf57bb..9d67ccb8fb736 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinFloatAggregatorFunction.java @@ -55,13 +55,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + FloatBlock block = page.getBlock(channels.get(0)); + FloatVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept FloatBlock block = page.getBlock(channels.get(0)); FloatVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -72,6 +88,16 @@ private void addRawVector(FloatVector vector) { } } + private void addRawVector(FloatVector vector, BooleanVector mask) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + state.floatValue(MinFloatAggregator.combine(state.floatValue(), vector.getFloat(i))); + } + } + private void addRawBlock(FloatBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if 
(block.isNull(p)) { @@ -86,6 +112,23 @@ private void addRawBlock(FloatBlock block) { } } + private void addRawBlock(FloatBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + state.floatValue(MinFloatAggregator.combine(state.floatValue(), block.getFloat(i))); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java index 0f2385cc120f9..a5ead0bd635c0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java @@ -55,13 +55,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + IntBlock block = page.getBlock(channels.get(0)); + IntVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept IntBlock block = page.getBlock(channels.get(0)); IntVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -72,6 +88,16 @@ private void addRawVector(IntVector vector) { } } + private void addRawVector(IntVector vector, BooleanVector mask) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + state.intValue(MinIntAggregator.combine(state.intValue(), vector.getInt(i))); + } + } + private void addRawBlock(IntBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -86,6 +112,23 @@ private void addRawBlock(IntBlock block) { } } + private void addRawBlock(IntBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + state.intValue(MinIntAggregator.combine(state.intValue(), block.getInt(i))); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunction.java index a47c901d70db4..60ba1993f45d8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunction.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIpAggregatorFunction.java @@ -56,13 +56,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept BytesRefBlock block = page.getBlock(channels.get(0)); BytesRefVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -73,6 +89,16 @@ private void addRawVector(BytesRefVector vector) { } } + private void addRawVector(BytesRefVector vector, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + MinIpAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + private void addRawBlock(BytesRefBlock block) { BytesRef scratch = new BytesRef(); for (int p = 0; p < block.getPositionCount(); p++) { @@ -87,6 +113,23 @@ private void addRawBlock(BytesRefBlock block) { } } + private void addRawBlock(BytesRefBlock block, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + MinIpAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java index 805729588158e..b7bab86d6423e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java @@ -55,13 +55,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept LongBlock block = page.getBlock(channels.get(0)); LongVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -72,6 +88,16 @@ private void addRawVector(LongVector vector) { } } + private void addRawVector(LongVector vector, BooleanVector mask) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) 
== false) { + continue; + } + state.longValue(MinLongAggregator.combine(state.longValue(), vector.getLong(i))); + } + } + private void addRawBlock(LongBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -86,6 +112,23 @@ private void addRawBlock(LongBlock block) { } } + private void addRawBlock(LongBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + state.longValue(MinLongAggregator.combine(state.longValue(), block.getLong(i))); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java index cd7a5b5974442..5a00e71baecd3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java @@ -11,6 +11,7 @@ import java.util.List; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DoubleBlock; @@ -58,13 +59,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + DoubleBlock block = page.getBlock(channels.get(0)); + DoubleVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept DoubleBlock block = page.getBlock(channels.get(0)); DoubleVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -74,6 +91,15 @@ private void addRawVector(DoubleVector vector) { } } + private void addRawVector(DoubleVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + PercentileDoubleAggregator.combine(state, vector.getDouble(i)); + } + } + private void addRawBlock(DoubleBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -87,6 +113,22 @@ private void addRawBlock(DoubleBlock block) { } } + private void addRawBlock(DoubleBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + PercentileDoubleAggregator.combine(state, block.getDouble(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert 
channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunction.java index 8f0ffd81e64b6..f3ce29a57d57a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileFloatAggregatorFunction.java @@ -11,6 +11,7 @@ import java.util.List; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.ElementType; @@ -58,13 +59,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + FloatBlock block = page.getBlock(channels.get(0)); + FloatVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept FloatBlock block = page.getBlock(channels.get(0)); FloatVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -74,6 +91,15 @@ private void addRawVector(FloatVector vector) { } } + private void addRawVector(FloatVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + PercentileFloatAggregator.combine(state, vector.getFloat(i)); + } + } + private void addRawBlock(FloatBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -87,6 +113,22 @@ private void addRawBlock(FloatBlock block) { } } + private void addRawBlock(FloatBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + PercentileFloatAggregator.combine(state, block.getFloat(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java index b9b1c2e90b768..8c4985e83012c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java @@ -11,6 +11,7 @@ import java.util.List; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; import 
org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.ElementType; @@ -58,13 +59,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + IntBlock block = page.getBlock(channels.get(0)); + IntVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept IntBlock block = page.getBlock(channels.get(0)); IntVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -74,6 +91,15 @@ private void addRawVector(IntVector vector) { } } + private void addRawVector(IntVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + PercentileIntAggregator.combine(state, vector.getInt(i)); + } + } + private void addRawBlock(IntBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -87,6 +113,22 @@ private void addRawBlock(IntBlock block) { } } + private void addRawBlock(IntBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + PercentileIntAggregator.combine(state, block.getInt(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java index cc785ce55bb55..09093ddb3f42b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java @@ -11,6 +11,7 @@ import java.util.List; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.ElementType; @@ -58,13 +59,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept LongBlock block = page.getBlock(channels.get(0)); LongVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -74,6 +91,15 @@ private void 
addRawVector(LongVector vector) { } } + private void addRawVector(LongVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + PercentileLongAggregator.combine(state, vector.getLong(i)); + } + } + private void addRawBlock(LongBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -87,6 +113,22 @@ private void addRawBlock(LongBlock block) { } } + private void addRawBlock(LongBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + PercentileLongAggregator.combine(state, block.getLong(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java index 354726f82b8f3..7f0e0b4e15158 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java @@ -56,13 +56,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + DoubleBlock block = page.getBlock(channels.get(0)); + DoubleVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept DoubleBlock block = page.getBlock(channels.get(0)); DoubleVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -73,6 +89,16 @@ private void addRawVector(DoubleVector vector) { } } + private void addRawVector(DoubleVector vector, BooleanVector mask) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + SumDoubleAggregator.combine(state, vector.getDouble(i)); + } + } + private void addRawBlock(DoubleBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -87,6 +113,23 @@ private void addRawBlock(DoubleBlock block) { } } + private void addRawBlock(DoubleBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SumDoubleAggregator.combine(state, block.getDouble(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunction.java 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunction.java index 3dedc327294d5..d916b832d77ff 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunction.java @@ -58,13 +58,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + FloatBlock block = page.getBlock(channels.get(0)); + FloatVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept FloatBlock block = page.getBlock(channels.get(0)); FloatVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -75,6 +91,16 @@ private void addRawVector(FloatVector vector) { } } + private void addRawVector(FloatVector vector, BooleanVector mask) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + SumFloatAggregator.combine(state, vector.getFloat(i)); + } + } + private void addRawBlock(FloatBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -89,6 +115,23 @@ private void addRawBlock(FloatBlock block) { } } + private void addRawBlock(FloatBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SumFloatAggregator.combine(state, block.getFloat(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java index e210429991aa6..5cd1abc35d28f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java @@ -57,13 +57,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + IntBlock block = page.getBlock(channels.get(0)); + IntVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept IntBlock block = page.getBlock(channels.get(0)); IntVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -74,6 +90,16 @@ private void 
addRawVector(IntVector vector) { } } + private void addRawVector(IntVector vector, BooleanVector mask) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + state.longValue(SumIntAggregator.combine(state.longValue(), vector.getInt(i))); + } + } + private void addRawBlock(IntBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -88,6 +114,23 @@ private void addRawBlock(IntBlock block) { } } + private void addRawBlock(IntBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + state.longValue(SumIntAggregator.combine(state.longValue(), block.getInt(i))); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java index 38d1b3de78265..e7781f82b1021 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java @@ -55,13 +55,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept LongBlock block = page.getBlock(channels.get(0)); LongVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -72,6 +88,16 @@ private void addRawVector(LongVector vector) { } } + private void addRawVector(LongVector vector, BooleanVector mask) { + state.seen(true); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + state.longValue(SumLongAggregator.combine(state.longValue(), vector.getLong(i))); + } + } + private void addRawBlock(LongBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -86,6 +112,23 @@ private void addRawBlock(LongBlock block) { } } + private void addRawBlock(LongBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + state.seen(true); + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + state.longValue(SumLongAggregator.combine(state.longValue(), block.getLong(i))); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunction.java index 617ebfd004808..0580dc297a362 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopBooleanAggregatorFunction.java @@ -58,13 +58,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + BooleanBlock block = page.getBlock(channels.get(0)); + BooleanVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept BooleanBlock block = page.getBlock(channels.get(0)); BooleanVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -74,6 +90,15 @@ private void addRawVector(BooleanVector vector) { } } + private void addRawVector(BooleanVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + TopBooleanAggregator.combine(state, vector.getBoolean(i)); + } + } + private void addRawBlock(BooleanBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -87,6 +112,22 @@ private void addRawBlock(BooleanBlock block) { } } + private void addRawBlock(BooleanBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + TopBooleanAggregator.combine(state, block.getBoolean(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunction.java index 8549da42c0d85..899af1a58851b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopDoubleAggregatorFunction.java @@ -10,6 +10,7 @@ import java.lang.StringBuilder; import java.util.List; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.ElementType; @@ -58,13 +59,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + DoubleBlock block = page.getBlock(channels.get(0)); + 
DoubleVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept DoubleBlock block = page.getBlock(channels.get(0)); DoubleVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -74,6 +91,15 @@ private void addRawVector(DoubleVector vector) { } } + private void addRawVector(DoubleVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + TopDoubleAggregator.combine(state, vector.getDouble(i)); + } + } + private void addRawBlock(DoubleBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -87,6 +113,22 @@ private void addRawBlock(DoubleBlock block) { } } + private void addRawBlock(DoubleBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + TopDoubleAggregator.combine(state, block.getDouble(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunction.java index 40ac1432caee8..168e7685c5273 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopFloatAggregatorFunction.java @@ -10,6 +10,7 @@ import java.lang.StringBuilder; import java.util.List; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.FloatBlock; import org.elasticsearch.compute.data.FloatVector; @@ -58,13 +59,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + FloatBlock block = page.getBlock(channels.get(0)); + FloatVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept FloatBlock block = page.getBlock(channels.get(0)); FloatVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -74,6 +91,15 @@ private void addRawVector(FloatVector vector) { } } + private void addRawVector(FloatVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + TopFloatAggregator.combine(state, vector.getFloat(i)); + } + } + private void addRawBlock(FloatBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -87,6 +113,22 @@ private void 
addRawBlock(FloatBlock block) { } } + private void addRawBlock(FloatBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + TopFloatAggregator.combine(state, block.getFloat(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunction.java index f6e858b69a639..80964decf572d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIntAggregatorFunction.java @@ -10,6 +10,7 @@ import java.lang.StringBuilder; import java.util.List; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; @@ -58,13 +59,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + IntBlock block = page.getBlock(channels.get(0)); + IntVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept IntBlock block = page.getBlock(channels.get(0)); IntVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -74,6 +91,15 @@ private void addRawVector(IntVector vector) { } } + private void addRawVector(IntVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + TopIntAggregator.combine(state, vector.getInt(i)); + } + } + private void addRawBlock(IntBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -87,6 +113,22 @@ private void addRawBlock(IntBlock block) { } } + private void addRawBlock(IntBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + TopIntAggregator.combine(state, block.getInt(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunction.java index 43f4d78d59cd9..90d8d7c124244 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopIpAggregatorFunction.java @@ -11,6 +11,7 @@ import java.util.List; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.ElementType; @@ -59,13 +60,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept BytesRefBlock block = page.getBlock(channels.get(0)); BytesRefVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -76,6 +93,16 @@ private void addRawVector(BytesRefVector vector) { } } + private void addRawVector(BytesRefVector vector, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + TopIpAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + private void addRawBlock(BytesRefBlock block) { BytesRef scratch = new BytesRef(); for (int p = 0; p < block.getPositionCount(); p++) { @@ -90,6 +117,23 @@ private void addRawBlock(BytesRefBlock block) { } } + private void addRawBlock(BytesRefBlock block, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + TopIpAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunction.java index c355e401478d8..18eef5a29b895 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/TopLongAggregatorFunction.java @@ -10,6 +10,7 @@ import java.lang.StringBuilder; import java.util.List; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -58,13 +59,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if 
(mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept LongBlock block = page.getBlock(channels.get(0)); LongVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -74,6 +91,15 @@ private void addRawVector(LongVector vector) { } } + private void addRawVector(LongVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + TopLongAggregator.combine(state, vector.getLong(i)); + } + } + private void addRawBlock(LongBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -87,6 +113,22 @@ private void addRawBlock(LongBlock block) { } } + private void addRawBlock(LongBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + TopLongAggregator.combine(state, block.getLong(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunction.java index 3e9bc91e0039a..d71d9a7b45bdb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBooleanAggregatorFunction.java @@ -52,13 +52,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + BooleanBlock block = page.getBlock(channels.get(0)); + BooleanVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept BooleanBlock block = page.getBlock(channels.get(0)); BooleanVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -68,6 +84,15 @@ private void addRawVector(BooleanVector vector) { } } + private void addRawVector(BooleanVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + ValuesBooleanAggregator.combine(state, vector.getBoolean(i)); + } + } + private void addRawBlock(BooleanBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -81,6 +106,22 @@ private void addRawBlock(BooleanBlock block) { } } + private void addRawBlock(BooleanBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if 
(mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + ValuesBooleanAggregator.combine(state, block.getBoolean(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunction.java index 24b88f59e38f4..56e79b64e3b86 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunction.java @@ -11,6 +11,7 @@ import java.util.List; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.ElementType; @@ -53,13 +54,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept BytesRefBlock block = page.getBlock(channels.get(0)); BytesRefVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -70,6 +87,16 @@ private void addRawVector(BytesRefVector vector) { } } + private void addRawVector(BytesRefVector vector, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + ValuesBytesRefAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + private void addRawBlock(BytesRefBlock block) { BytesRef scratch = new BytesRef(); for (int p = 0; p < block.getPositionCount(); p++) { @@ -84,6 +111,23 @@ private void addRawBlock(BytesRefBlock block) { } } + private void addRawBlock(BytesRefBlock block, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + ValuesBytesRefAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunction.java index a6295038dbd7a..3ec31b0fd5a4d 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesDoubleAggregatorFunction.java @@ -10,6 +10,7 @@ import java.lang.StringBuilder; import java.util.List; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.ElementType; @@ -52,13 +53,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + DoubleBlock block = page.getBlock(channels.get(0)); + DoubleVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept DoubleBlock block = page.getBlock(channels.get(0)); DoubleVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -68,6 +85,15 @@ private void addRawVector(DoubleVector vector) { } } + private void addRawVector(DoubleVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + ValuesDoubleAggregator.combine(state, vector.getDouble(i)); + } + } + private void addRawBlock(DoubleBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -81,6 +107,22 @@ private void addRawBlock(DoubleBlock block) { } } + private void addRawBlock(DoubleBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + ValuesDoubleAggregator.combine(state, block.getDouble(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunction.java index c7385e87bfbf2..00ab8db1c4ac6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesFloatAggregatorFunction.java @@ -10,6 +10,7 @@ import java.lang.StringBuilder; import java.util.List; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.FloatBlock; import org.elasticsearch.compute.data.FloatVector; @@ -52,13 +53,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + FloatBlock 
block = page.getBlock(channels.get(0)); + FloatVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept FloatBlock block = page.getBlock(channels.get(0)); FloatVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -68,6 +85,15 @@ private void addRawVector(FloatVector vector) { } } + private void addRawVector(FloatVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + ValuesFloatAggregator.combine(state, vector.getFloat(i)); + } + } + private void addRawBlock(FloatBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -81,6 +107,22 @@ private void addRawBlock(FloatBlock block) { } } + private void addRawBlock(FloatBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + ValuesFloatAggregator.combine(state, block.getFloat(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunction.java index 19e578936cd14..5a0d7c893e607 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesIntAggregatorFunction.java @@ -10,6 +10,7 @@ import java.lang.StringBuilder; import java.util.List; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; @@ -52,13 +53,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + IntBlock block = page.getBlock(channels.get(0)); + IntVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept IntBlock block = page.getBlock(channels.get(0)); IntVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -68,6 +85,15 @@ private void addRawVector(IntVector vector) { } } + private void addRawVector(IntVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + ValuesIntAggregator.combine(state, vector.getInt(i)); + } + } + private void addRawBlock(IntBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -81,6 +107,22 @@ private 
void addRawBlock(IntBlock block) { } } + private void addRawBlock(IntBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + ValuesIntAggregator.combine(state, block.getInt(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunction.java index 420da87076a37..ca9a8347e3a41 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunction.java @@ -10,6 +10,7 @@ import java.lang.StringBuilder; import java.util.List; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -52,13 +53,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept LongBlock block = page.getBlock(channels.get(0)); LongVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -68,6 +85,15 @@ private void addRawVector(LongVector vector) { } } + private void addRawVector(LongVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + ValuesLongAggregator.combine(state, vector.getLong(i)); + } + } + private void addRawBlock(LongBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -81,6 +107,22 @@ private void addRawBlock(LongBlock block) { } } + private void addRawBlock(LongBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + ValuesLongAggregator.combine(state, block.getLong(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunction.java index eef094ce2ecfa..a427c75c63fff 
100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointDocValuesAggregatorFunction.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.aggregation.AggregatorFunction; import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.ElementType; @@ -60,13 +61,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + LongBlock block = page.getBlock(channels.get(0)); + LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept LongBlock block = page.getBlock(channels.get(0)); LongVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -76,6 +93,15 @@ private void addRawVector(LongVector vector) { } } + private void addRawVector(LongVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + SpatialCentroidCartesianPointDocValuesAggregator.combine(state, vector.getLong(i)); + } + } + private void addRawBlock(LongBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -89,6 +115,22 @@ private void addRawBlock(LongBlock block) { } } + private void addRawBlock(LongBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialCentroidCartesianPointDocValuesAggregator.combine(state, block.getLong(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunction.java index bdc7c58a6c963..c2086f2ab3d98 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidCartesianPointSourceValuesAggregatorFunction.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.aggregation.AggregatorFunction; import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import 
org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DoubleBlock; @@ -63,13 +64,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept BytesRefBlock block = page.getBlock(channels.get(0)); BytesRefVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -80,6 +97,16 @@ private void addRawVector(BytesRefVector vector) { } } + private void addRawVector(BytesRefVector vector, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + SpatialCentroidCartesianPointSourceValuesAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + private void addRawBlock(BytesRefBlock block) { BytesRef scratch = new BytesRef(); for (int p = 0; p < block.getPositionCount(); p++) { @@ -94,6 +121,23 @@ private void addRawBlock(BytesRefBlock block) { } } + private void addRawBlock(BytesRefBlock block, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialCentroidCartesianPointSourceValuesAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunction.java index fcd17d4c5cd86..0509c03ebf77c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointDocValuesAggregatorFunction.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.aggregation.AggregatorFunction; import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.ElementType; @@ -60,13 +61,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + LongBlock block = page.getBlock(channels.get(0)); + 
LongVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept LongBlock block = page.getBlock(channels.get(0)); LongVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -76,6 +93,15 @@ private void addRawVector(LongVector vector) { } } + private void addRawVector(LongVector vector, BooleanVector mask) { + for (int i = 0; i < vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + SpatialCentroidGeoPointDocValuesAggregator.combine(state, vector.getLong(i)); + } + } + private void addRawBlock(LongBlock block) { for (int p = 0; p < block.getPositionCount(); p++) { if (block.isNull(p)) { @@ -89,6 +115,22 @@ private void addRawBlock(LongBlock block) { } } + private void addRawBlock(LongBlock block, BooleanVector mask) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialCentroidGeoPointDocValuesAggregator.combine(state, block.getLong(i)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunction.java index be7b8d9758d1c..10a29c841b79f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/spatial/SpatialCentroidGeoPointSourceValuesAggregatorFunction.java @@ -13,6 +13,7 @@ import org.elasticsearch.compute.aggregation.AggregatorFunction; import org.elasticsearch.compute.aggregation.IntermediateStateDesc; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DoubleBlock; @@ -63,13 +64,29 @@ public int intermediateBlockCount() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + // Entire page masked away + return; + } + // No masking + BytesRefBlock block = page.getBlock(channels.get(0)); + BytesRefVector vector = block.asVector(); + if (vector != null) { + addRawVector(vector); + } else { + addRawBlock(block); + } + return; + } + // Some positions masked away, others kept BytesRefBlock block = page.getBlock(channels.get(0)); BytesRefVector vector = block.asVector(); if (vector != null) { - addRawVector(vector); + addRawVector(vector, mask); } else { - addRawBlock(block); + addRawBlock(block, mask); } } @@ -80,6 +97,16 @@ private void addRawVector(BytesRefVector vector) { } } + private void addRawVector(BytesRefVector vector, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int i = 0; i < 
vector.getPositionCount(); i++) { + if (mask.getBoolean(i) == false) { + continue; + } + SpatialCentroidGeoPointSourceValuesAggregator.combine(state, vector.getBytesRef(i, scratch)); + } + } + private void addRawBlock(BytesRefBlock block) { BytesRef scratch = new BytesRef(); for (int p = 0; p < block.getPositionCount(); p++) { @@ -94,6 +121,23 @@ private void addRawBlock(BytesRefBlock block) { } } + private void addRawBlock(BytesRefBlock block, BooleanVector mask) { + BytesRef scratch = new BytesRef(); + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p) == false) { + continue; + } + if (block.isNull(p)) { + continue; + } + int start = block.getFirstValueIndex(p); + int end = start + block.getValueCount(p); + for (int i = start; i < end; i++) { + SpatialCentroidGeoPointSourceValuesAggregator.combine(state, block.getBytesRef(i, scratch)); + } + } + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/Aggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/Aggregator.java index 1a58a27e3377f..f0471a995f21e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/Aggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/Aggregator.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.Describable; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.core.Releasable; @@ -16,9 +17,6 @@ import java.util.function.Function; public class Aggregator implements Releasable { - - public static final Object[] EMPTY_PARAMS = new Object[] {}; - private final AggregatorFunction aggregatorFunction; private final AggregatorMode mode; @@ -35,11 +33,14 @@ public int evaluateBlockCount() { return mode.isOutputPartial() ? aggregatorFunction.intermediateBlockCount() : 1; } - public void processPage(Page page) { + public void processPage(Page page, BooleanVector mask) { if (mode.isInputPartial()) { + if (mask.isConstant() == false || mask.getBoolean(0) == false) { + throw new IllegalStateException("can't mask intermediate input"); + } aggregatorFunction.addIntermediateInput(page); } else { - aggregatorFunction.addRawInput(page); + aggregatorFunction.addRawInput(page, mask); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AggregatorFunction.java index 3d214ff3d2e0d..6050c774dd201 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/AggregatorFunction.java @@ -8,18 +8,43 @@ package org.elasticsearch.compute.aggregation; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.core.Releasable; +/** + * A non-grouped aggregation. 
+ */ public interface AggregatorFunction extends Releasable { - - void addRawInput(Page page); - + /** + * Add a page worth of data to the aggregation. + * @param mask a mask to apply to the positions. If the position is {@code false} then + * the aggregation should skip it. + */ + void addRawInput(Page page, BooleanVector mask); + + /** + * Add a pre-aggregated page worth of "intermediate" input. This intermediate input + * will have been created by calling {@link #evaluateIntermediate} on this agg, though + * likely in a different {@link Driver} and maybe on a different + * physical node. + */ void addIntermediateInput(Page page); + /** + * Build pre-aggregated "intermediate" data to pass to the {@link #addIntermediateInput}. + * @param blocks write the output into this array + * @param offset write the first {@link Block} at this offset in {@code blocks} + */ void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext); + /** + * Build the final results from running this agg. + * @param blocks write the output into this array + * @param offset write the first {@link Block} at this offset in {@code blocks} + */ void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext); /** The number of blocks used by intermediate state. */ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountAggregatorFunction.java index c32f6f4703a79..c6416f6d075db 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountAggregatorFunction.java @@ -72,13 +72,39 @@ private int blockIndex() { } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { Block block = page.getBlock(blockIndex()); LongState state = this.state; - int count = countAll ? block.getPositionCount() : block.getTotalValueCount(); + int count; + if (mask.isConstant()) { + if (mask.getBoolean(0) == false) { + return; + } + count = countAll ? block.getPositionCount() : block.getTotalValueCount(); + } else { + count = countMasked(block, mask); + } state.longValue(state.longValue() + count); } + private int countMasked(Block block, BooleanVector mask) { + int count = 0; + if (countAll) { + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p)) { + count++; + } + } + return count; + } + for (int p = 0; p < block.getPositionCount(); p++) { + if (mask.getBoolean(p)) { + count += block.getValueCount(p); + } + } + return count; + } + @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunction.java new file mode 100644 index 0000000000000..04d78673c42e8 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunction.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.data.ToMask; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; + +/** + * A {@link AggregatorFunction} that wraps another, filtering which positions + * are supplied to the aggregator. + *
<p>
+ * This works by running the filter and providing its results in the {@code mask} + * parameter for {@link #addRawInput}. + *
</p>
+ */ +record FilteredAggregatorFunction(AggregatorFunction next, EvalOperator.ExpressionEvaluator filter) implements AggregatorFunction { + @Override + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant() == false || mask.getBoolean(0) == false) { + throw new UnsupportedOperationException("can't filter twice"); + } + try (BooleanBlock filterResult = ((BooleanBlock) filter.eval(page)); ToMask m = filterResult.toMask()) { + // TODO warn on mv fields + next.addRawInput(page, m.mask()); + } + } + + @Override + public void addIntermediateInput(Page page) { + next.addIntermediateInput(page); + } + + @Override + public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { + next.evaluateIntermediate(blocks, offset, driverContext); + } + + @Override + public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { + next.evaluateFinal(blocks, offset, driverContext); + } + + @Override + public int intermediateBlockCount() { + return next.intermediateBlockCount(); + } + + @Override + public void close() { + Releasables.close(next, filter); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionSupplier.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionSupplier.java index c8a8696c03449..ed63a283b3568 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionSupplier.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionSupplier.java @@ -21,7 +21,17 @@ public record FilteredAggregatorFunctionSupplier(AggregatorFunctionSupplier next @Override public AggregatorFunction aggregator(DriverContext driverContext) { - throw new UnsupportedOperationException("TODO"); + AggregatorFunction next = this.next.aggregator(driverContext); + EvalOperator.ExpressionEvaluator filter = null; + try { + filter = this.filter.get(driverContext); + AggregatorFunction result = new FilteredAggregatorFunction(next, filter); + next = null; + filter = null; + return result; + } finally { + Releasables.closeExpectNoException(next, filter); + } } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunction.java index c8dd80d7afe99..3e38b6d6fe9fa 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunction.java @@ -23,6 +23,11 @@ /** * A {@link GroupingAggregatorFunction} that wraps another, filtering which positions * are supplied to the aggregator. + *
<p>
+ * This filtering works by setting all of the group ids for filtered positions to + * {@code null}. {@link GroupingAggregatorFunction} will then skip collecting those + * positions. + *
</p>
*/ record FilteredGroupingAggregatorFunction(GroupingAggregatorFunction next, EvalOperator.ExpressionEvaluator filter) implements diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FromPartialAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FromPartialAggregatorFunction.java index 8d4ce2971d34b..1f6270528593c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FromPartialAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/FromPartialAggregatorFunction.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.aggregation; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.CompositeBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntVector; @@ -41,7 +42,10 @@ public FromPartialAggregatorFunction(DriverContext driverContext, GroupingAggreg } @Override - public void addRawInput(Page page) { + public void addRawInput(Page page, BooleanVector mask) { + if (mask.isConstant() == false || mask.getBoolean(0) == false) { + throw new IllegalStateException("can't mask partial"); + } addIntermediateInput(page); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ToPartialAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ToPartialAggregatorFunction.java index 2083b8ebbfff2..6524af116d410 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ToPartialAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/ToPartialAggregatorFunction.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.aggregation; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.CompositeBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; @@ -37,8 +38,8 @@ public ToPartialAggregatorFunction(AggregatorFunction delegate, List ch } @Override - public void addRawInput(Page page) { - delegate.addRawInput(page); + public void addRawInput(Page page, BooleanVector mask) { + delegate.addRawInput(page, mask); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AggregationOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AggregationOperator.java index 260b72dc35d9c..175f550ab5d3b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AggregationOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/AggregationOperator.java @@ -17,6 +17,7 @@ import org.elasticsearch.compute.aggregation.Aggregator.Factory; import org.elasticsearch.compute.aggregation.AggregatorMode; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; @@ -93,9 +94,9 @@ public void addInput(Page page) { long start = System.nanoTime(); checkState(needsInput(), "Operator is already finishing"); requireNonNull(page, "page is null"); - try { + try (BooleanVector noMasking = 
driverContext.blockFactory().newConstantBooleanVector(true, page.getPositionCount())) { for (Aggregator aggregator : aggregators) { - aggregator.processPage(page); + aggregator.processPage(page, noMasking); } } finally { page.releaseBlocks(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java index 6e56b96bda06e..275038e6d2f02 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java @@ -69,8 +69,14 @@ protected final Matcher expectedDescriptionOfSimple() { @Override protected final Matcher expectedToStringOfSimple() { + return equalTo( + "AggregationOperator[aggregators=[Aggregator[aggregatorFunction=" + expectedToStringOfSimpleAggregator() + ", mode=SINGLE]]]" + ); + } + + protected String expectedToStringOfSimpleAggregator() { String type = getClass().getSimpleName().replace("Tests", ""); - return equalTo("AggregationOperator[aggregators=[Aggregator[aggregatorFunction=" + type + "[channels=[0]], mode=SINGLE]]]"); + return type + "[channels=[0]]"; } @Override @@ -162,7 +168,7 @@ public final void testEmptyInputInitialIntermediateFinal() { // Returns an intermediate state that is equivalent to what the local execution planner will emit // if it determines that certain shards have no relevant data. - final List nullIntermediateState(BlockFactory blockFactory) { + List nullIntermediateState(BlockFactory blockFactory) { try (var agg = aggregatorFunction(List.of()).aggregator(driverContext())) { var method = agg.getClass().getMethod("intermediateStateDesc"); @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionTests.java new file mode 100644 index 0000000000000..6ad3251d3c120 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredAggregatorFunctionTests.java @@ -0,0 +1,96 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.SequenceIntBlockSourceOperator; +import org.elasticsearch.compute.operator.SourceOperator; +import org.junit.After; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.stream.IntStream; + +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; + +public class FilteredAggregatorFunctionTests extends AggregatorFunctionTestCase { + private final List unclosed = Collections.synchronizedList(new ArrayList<>()); + + // TODO some version of this test that applies across all aggs + @Override + protected AggregatorFunctionSupplier aggregatorFunction(List inputChannels) { + return new FilteredAggregatorFunctionSupplier( + new SumIntAggregatorFunctionSupplier(inputChannels), + new FilteredGroupingAggregatorFunctionTests.AnyGreaterThanFactory(unclosed, inputChannels) + ); + } + + @Override + protected String expectedDescriptionOfAggregator() { + return "Filtered[next=sum of ints, filter=any > 0]"; + } + + @Override + protected String expectedToStringOfSimpleAggregator() { + return "FilteredAggregatorFunction[next=SumIntAggregatorFunction[channels=[0]], filter=any > 0]"; + } + + @Override + protected void assertSimpleOutput(List input, Block result) { + long sum = 0; + for (Block block : input) { + IntBlock ints = (IntBlock) block; + for (int p = 0; p < ints.getPositionCount(); p++) { + /* + * Perform the sum on the values *only* if any of the + * values is > 0 to line up with the condition + */ + int start = ints.getFirstValueIndex(p); + int end = start + ints.getValueCount(p); + boolean selected = false; + for (int i = start; i < end; i++) { + selected |= ints.getInt(i) > 0; + } + if (selected == false) { + continue; + } + start = ints.getFirstValueIndex(p); + end = start + ints.getValueCount(p); + for (int i = start; i < end; i++) { + sum += ints.getInt(i); + } + } + } + assertThat(((LongBlock) result).getLong(0), equalTo(sum)); + } + + @Override + protected List nullIntermediateState(BlockFactory blockFactory) { + return new SumIntAggregatorFunctionTests().nullIntermediateState(blockFactory); + } + + @Override + protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { + int max = between(1, Integer.MAX_VALUE / size / 5); + return new SequenceIntBlockSourceOperator(blockFactory, IntStream.range(0, size).map(l -> between(-max, max))); + } + + @After + public void checkUnclosed() { + for (Exception tracker : unclosed) { + logger.error("unclosed", tracker); + } + assertThat(unclosed, empty()); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunctionTests.java index 7b924076c0186..87cb99bd0709f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/FilteredGroupingAggregatorFunctionTests.java @@ -116,7 +116,7 @@ public void checkUnclosed() { * This checks if *any* of the integers are > 0. 
If so we push the group to * the aggregation. */ - private record AnyGreaterThanFactory(List unclosed, List inputChannels) + record AnyGreaterThanFactory(List unclosed, List inputChannels) implements EvalOperator.ExpressionEvaluator.Factory { @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java index 8ca02b64f01ff..5a439becd4757 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java @@ -236,10 +236,10 @@ public void testConstantNullBlock() throws IOException { public void testSimulateAggs() { DriverContext driverCtx = driverContext(); Page page = new Page(blockFactory.newLongArrayVector(new long[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, 10).asBlock()); - var bigArrays = BigArrays.NON_RECYCLING_INSTANCE; - var params = new Object[] {}; var function = SumLongAggregatorFunction.create(driverCtx, List.of(0)); - function.addRawInput(page); + try (BooleanVector noMasking = driverContext().blockFactory().newConstantBooleanVector(true, page.getPositionCount())) { + function.addRawInput(page, noMasking); + } Block[] blocks = new Block[function.intermediateBlockCount()]; try { function.evaluateIntermediate(blocks, 0, driverCtx); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java index eb9f10f7b2e0f..1c917a961a343 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractAggregationTestCase.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; @@ -150,8 +151,10 @@ private void aggregateSingleMode(Expression expression) { Object result; try (var aggregator = aggregator(expression, initialInputChannels(), AggregatorMode.SINGLE)) { for (Page inputPage : rows(testCase.getMultiRowFields())) { - try { - aggregator.processPage(inputPage); + try ( + BooleanVector noMasking = driverContext().blockFactory().newConstantBooleanVector(true, inputPage.getPositionCount()) + ) { + aggregator.processPage(inputPage, noMasking); } finally { inputPage.releaseBlocks(); } @@ -217,8 +220,10 @@ private void aggregateWithIntermediates(Expression expression) { intermediateBlocks = new Block[intermediateBlockOffset + intermediateStates + intermediateBlockExtraSize]; for (Page inputPage : rows(testCase.getMultiRowFields())) { - try { - aggregator.processPage(inputPage); + try ( + BooleanVector noMasking = driverContext().blockFactory().newConstantBooleanVector(true, inputPage.getPositionCount()) + ) { + aggregator.processPage(inputPage, noMasking); } finally { inputPage.releaseBlocks(); } @@ -247,9 +252,9 @@ private void aggregateWithIntermediates(Expression expression) { ) ) { Page inputPage = new 
Page(intermediateBlocks); - try { + try (BooleanVector noMasking = driverContext().blockFactory().newConstantBooleanVector(true, inputPage.getPositionCount())) { if (inputPage.getPositionCount() > 0) { - aggregator.processPage(inputPage); + aggregator.processPage(inputPage, noMasking); } } finally { inputPage.releaseBlocks(); From bceeced3cc82f8e410a221778441cf1684001598 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Wed, 11 Sep 2024 21:56:50 +0100 Subject: [PATCH 36/58] Bump Elasticsearch to a minimum of JDK 21 (#112252) --- .ci/java-versions-aarch64.properties | 2 +- .ci/java-versions-fips.properties | 2 +- .ci/java-versions.properties | 2 +- .../src/main/resources/minimumCompilerVersion | 2 +- build-tools-internal/src/main/resources/minimumCompilerVersion | 2 +- build-tools-internal/src/main/resources/minimumRuntimeVersion | 2 +- .../java/org/elasticsearch/compute/gen/AggregatorProcessor.java | 2 +- .../java/org/elasticsearch/compute/gen/ConsumeProcessor.java | 2 +- .../java/org/elasticsearch/compute/gen/EvaluatorProcessor.java | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.ci/java-versions-aarch64.properties b/.ci/java-versions-aarch64.properties index b1e0f4cfe8aff..8815d5011a8e7 100644 --- a/.ci/java-versions-aarch64.properties +++ b/.ci/java-versions-aarch64.properties @@ -4,4 +4,4 @@ # build and test Elasticsearch for this branch. Valid Java versions # are 'java' or 'openjdk' followed by the major release number. -ES_BUILD_JAVA=jdk17 +ES_BUILD_JAVA=jdk21 diff --git a/.ci/java-versions-fips.properties b/.ci/java-versions-fips.properties index fa68739355768..87490374bb994 100644 --- a/.ci/java-versions-fips.properties +++ b/.ci/java-versions-fips.properties @@ -4,4 +4,4 @@ # build and test Elasticsearch for this branch. Valid Java versions # are 'java' or 'openjdk' followed by the major release number. -ES_BUILD_JAVA=openjdk17 +ES_BUILD_JAVA=openjdk21 diff --git a/.ci/java-versions.properties b/.ci/java-versions.properties index 21884973742ba..5ed97ab248c2f 100644 --- a/.ci/java-versions.properties +++ b/.ci/java-versions.properties @@ -4,4 +4,4 @@ # build and test Elasticsearch for this branch. Valid Java versions # are 'java' or 'openjdk' followed by the major release number. 
-ES_BUILD_JAVA=openjdk17 +ES_BUILD_JAVA=openjdk21 diff --git a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/build-tools-internal/src/main/resources/minimumCompilerVersion b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/build-tools-internal/src/main/resources/minimumCompilerVersion index 98d9bcb75a685..aabe6ec3909c9 100644 --- a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/build-tools-internal/src/main/resources/minimumCompilerVersion +++ b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_git/remote/build-tools-internal/src/main/resources/minimumCompilerVersion @@ -1 +1 @@ -17 +21 diff --git a/build-tools-internal/src/main/resources/minimumCompilerVersion b/build-tools-internal/src/main/resources/minimumCompilerVersion index 98d9bcb75a685..aabe6ec3909c9 100644 --- a/build-tools-internal/src/main/resources/minimumCompilerVersion +++ b/build-tools-internal/src/main/resources/minimumCompilerVersion @@ -1 +1 @@ -17 +21 diff --git a/build-tools-internal/src/main/resources/minimumRuntimeVersion b/build-tools-internal/src/main/resources/minimumRuntimeVersion index 98d9bcb75a685..aabe6ec3909c9 100644 --- a/build-tools-internal/src/main/resources/minimumRuntimeVersion +++ b/build-tools-internal/src/main/resources/minimumRuntimeVersion @@ -1 +1 @@ -17 +21 diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java index 4b1f946a1d176..863db86eb934a 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorProcessor.java @@ -52,7 +52,7 @@ public Set getSupportedAnnotationTypes() { @Override public SourceVersion getSupportedSourceVersion() { - return SourceVersion.RELEASE_17; + return SourceVersion.RELEASE_21; } @Override diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConsumeProcessor.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConsumeProcessor.java index b76b1cc7ea74b..b4e74d52ffeb8 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConsumeProcessor.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConsumeProcessor.java @@ -48,7 +48,7 @@ public Set getSupportedAnnotationTypes() { @Override public SourceVersion getSupportedSourceVersion() { - return SourceVersion.latest(); + return SourceVersion.RELEASE_21; } @Override diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorProcessor.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorProcessor.java index 09012c7b3a48a..ec36908833661 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorProcessor.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorProcessor.java @@ -44,7 +44,7 @@ public Set getSupportedAnnotationTypes() { @Override public SourceVersion getSupportedSourceVersion() { - return SourceVersion.RELEASE_17; + return SourceVersion.RELEASE_21; } @Override From a211d801311ae33a64a64795b6d22b781954a167 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine 
<58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 12 Sep 2024 07:15:08 +1000 Subject: [PATCH 37/58] Mute org.elasticsearch.repositories.blobstore.testkit.integrity.RepositoryVerifyIntegrityIT testCorruption #112769 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index ac35776db665f..777f52bb1db39 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -208,6 +208,9 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/cluster/stats/line_1450} issue: https://github.com/elastic/elasticsearch/issues/112732 +- class: org.elasticsearch.repositories.blobstore.testkit.integrity.RepositoryVerifyIntegrityIT + method: testCorruption + issue: https://github.com/elastic/elasticsearch/issues/112769 # Examples: # From cef6f0bd232b30cf26787edebed0e327a3d59029 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Wed, 11 Sep 2024 14:18:17 -0700 Subject: [PATCH 38/58] [DOCS] Augment installation warnings (#112756) --- docs/plugins/install_remove.asciidoc | 2 +- docs/reference/setup/install/deb.asciidoc | 2 +- docs/reference/setup/install/rpm.asciidoc | 2 +- docs/reference/setup/install/targz.asciidoc | 4 ++-- docs/reference/setup/install/zip-windows.asciidoc | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/plugins/install_remove.asciidoc b/docs/plugins/install_remove.asciidoc index c9d163fb30ef2..893af4dac42ff 100644 --- a/docs/plugins/install_remove.asciidoc +++ b/docs/plugins/install_remove.asciidoc @@ -4,7 +4,7 @@ ifeval::["{release-state}"=="unreleased"] -WARNING: Version {version} of the Elastic Stack has not yet been released. +WARNING: Version {version} of the Elastic Stack has not yet been released. The plugin might not be available. endif::[] diff --git a/docs/reference/setup/install/deb.asciidoc b/docs/reference/setup/install/deb.asciidoc index c7e146a5442cf..f6093494b6e5c 100644 --- a/docs/reference/setup/install/deb.asciidoc +++ b/docs/reference/setup/install/deb.asciidoc @@ -108,7 +108,7 @@ include::skip-set-kernel-parameters.asciidoc[] ifeval::["{release-state}"=="unreleased"] -WARNING: Version {version} of Elasticsearch has not yet been released. +WARNING: Version {version} of Elasticsearch has not yet been released. The package might not be available. endif::[] diff --git a/docs/reference/setup/install/rpm.asciidoc b/docs/reference/setup/install/rpm.asciidoc index 60815d570ab35..085c48b0ce4b7 100644 --- a/docs/reference/setup/install/rpm.asciidoc +++ b/docs/reference/setup/install/rpm.asciidoc @@ -101,7 +101,7 @@ endif::[] ifeval::["{release-state}"=="unreleased"] -WARNING: Version {version} of Elasticsearch has not yet been released. +WARNING: Version {version} of Elasticsearch has not yet been released. The RPM might not be available. endif::[] diff --git a/docs/reference/setup/install/targz.asciidoc b/docs/reference/setup/install/targz.asciidoc index d40a4bfdd7e74..cab5be47a8c4f 100644 --- a/docs/reference/setup/install/targz.asciidoc +++ b/docs/reference/setup/install/targz.asciidoc @@ -21,7 +21,7 @@ see the <> ifeval::["{release-state}"=="unreleased"] -WARNING: Version {version} of {es} has not yet been released. +WARNING: Version {version} of {es} has not yet been released. The archive might not be available. endif::[] @@ -44,7 +44,7 @@ cd elasticsearch-{version}/ <2> ifeval::["{release-state}"=="unreleased"] -WARNING: Version {version} of {es} has not yet been released. +WARNING: Version {version} of {es} has not yet been released. 
The archive might not be available. endif::[] diff --git a/docs/reference/setup/install/zip-windows.asciidoc b/docs/reference/setup/install/zip-windows.asciidoc index eb84ff149f8bd..27a330debd814 100644 --- a/docs/reference/setup/install/zip-windows.asciidoc +++ b/docs/reference/setup/install/zip-windows.asciidoc @@ -31,7 +31,7 @@ see the <> ifeval::["{release-state}"=="unreleased"] -WARNING: Version {version} of {es} has not yet been released. +WARNING: Version {version} of {es} has not yet been released. The archive might not be available. endif::[] From 196728e2f2cd9e108b66af7927314f93650b572d Mon Sep 17 00:00:00 2001 From: Stef Nestor <26751266+stefnestor@users.noreply.github.com> Date: Wed, 11 Sep 2024 15:32:40 -0600 Subject: [PATCH 39/58] (Doc+) CAT Nodes default columns (#112715) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 👋 howdy, team! 1. Related to https://github.com/elastic/dev/issues/2631, highlights customers are usually seeking `heap.percent` instead of `ram.percent` 2. Aligns the claimed "(Default)" columns in doc to what returned for v8.15.1 test cluster --- docs/reference/cat/nodes.asciidoc | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/reference/cat/nodes.asciidoc b/docs/reference/cat/nodes.asciidoc index fc5b01f9234e3..5f329c00efd7f 100644 --- a/docs/reference/cat/nodes.asciidoc +++ b/docs/reference/cat/nodes.asciidoc @@ -50,16 +50,16 @@ Valid columns are: (Default) IP address, such as `127.0.1.1`. `heap.percent`, `hp`, `heapPercent`:: -(Default) Maximum configured heap, such as `7`. +(Default) Used percentage of total allocated Elasticsearch JVM heap, such as `7`. This reflects only the {es} process running within the operating system and is the most direct indicator of its JVM/heap/memory resource performance. `heap.max`, `hm`, `heapMax`:: -(Default) Total heap, such as `4gb`. +Total heap, such as `4gb`. `ram.percent`, `rp`, `ramPercent`:: -(Default) Used total memory percentage, such as `47`. +(Default) Used percentage of total operating system's memory, such as `47`. This reflects all processes running on operating system instead of only {es} and is not guaranteed to correlate to its performance. `file_desc.percent`, `fdp`, `fileDescriptorPercent`:: -(Default) Used file descriptors percentage, such as `1`. +Used file descriptors percentage, such as `1`. `node.role`, `r`, `role`, `nodeRole`:: (Default) Roles of the node. @@ -138,16 +138,16 @@ Used file descriptors, such as `123`. Maximum number of file descriptors, such as `1024`. `cpu`:: -Recent system CPU usage as percent, such as `12`. +(Default) Recent system CPU usage as percent, such as `12`. `load_1m`, `l`:: -Most recent load average, such as `0.22`. +(Default) Most recent load average, such as `0.22`. `load_5m`, `l`:: -Load average for the last five minutes, such as `0.78`. +(Default) Load average for the last five minutes, such as `0.78`. `load_15m`, `l`:: -Load average for the last fifteen minutes, such as `1.24`. +(Default) Load average for the last fifteen minutes, such as `1.24`. `uptime`, `u`:: Node uptime, such as `17.3m`. From c4932f275676b25d8772d3e5b5c3df56b5ae990f Mon Sep 17 00:00:00 2001 From: Stef Nestor <26751266+stefnestor@users.noreply.github.com> Date: Wed, 11 Sep 2024 15:33:47 -0600 Subject: [PATCH 40/58] (Doc+) Terminating Exit Codes (#112530) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 👋 howdy, team! 
Mini PR to cross-replicate [this knowledge article](https://support.elastic.co/knowledge/6610ba83) about Elasticsearch's exit codes which expands [this ES doc section](https://www.elastic.co/guide/en/elasticsearch/reference/master/stopping-elasticsearch.html#fatal-errors). --- docs/reference/setup/stopping.asciidoc | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/reference/setup/stopping.asciidoc b/docs/reference/setup/stopping.asciidoc index 8c3a8d40fa1d2..f80812f026936 100644 --- a/docs/reference/setup/stopping.asciidoc +++ b/docs/reference/setup/stopping.asciidoc @@ -50,9 +50,14 @@ such a shutdown, it does not go through an orderly shutdown as described above. process will also return with a special status code indicating the nature of the error. [horizontal] +Killed by jvmkiller agent:: 158 +User or kernel SIGTERM:: 143 +Slain by kernel oom-killer:: 137 +Segmentation fault:: 134 JVM internal error:: 128 Out of memory error:: 127 Stack overflow error:: 126 Unknown virtual machine error:: 125 Serious I/O error:: 124 +Bootstrap check failure:: 78 Unknown fatal error:: 1 From 2b426f70036c0b3bce4f60958fd0b09443ca716d Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 11 Sep 2024 14:50:06 -0700 Subject: [PATCH 41/58] Fix verifyVersions task (#112765) --- .../java/org/elasticsearch/gradle/internal/BwcVersions.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java index 720c159f75552..7b1bc14fda8af 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcVersions.java @@ -223,7 +223,10 @@ public void compareToAuthoritative(List authoritativeReleasedVersions) } private List getReleased() { - return versions.stream().filter(v -> unreleased.containsKey(v) == false).toList(); + return versions.stream() + .filter(v -> v.getMajor() >= currentVersion.getMajor() - 1) + .filter(v -> unreleased.containsKey(v) == false) + .toList(); } /** From 7b17077144b5d4599743e3c50cb3f596eda9d2f8 Mon Sep 17 00:00:00 2001 From: Stef Nestor <26751266+stefnestor@users.noreply.github.com> Date: Wed, 11 Sep 2024 16:05:15 -0600 Subject: [PATCH 42/58] (Doc+) Inference Pipeline ignores Mapping Analyzers (#112522) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * (Doc+) Inference Pipeline ignores Mapping Analyzers From internal Dev feedback (will cross-link after), this updates that inference processors within ingest pipelines run before mapping analyzers effectively ignoring them. So if users want analyzers to take effect, they would need to select the analyzer's ingest pipeline process equivalent and run it higher in flow than the inference processor. --------- Co-authored-by: István Zoltán Szabó --- docs/reference/ingest/processors/inference.asciidoc | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/reference/ingest/processors/inference.asciidoc b/docs/reference/ingest/processors/inference.asciidoc index 982da1fe17f7a..c942959d34e53 100644 --- a/docs/reference/ingest/processors/inference.asciidoc +++ b/docs/reference/ingest/processors/inference.asciidoc @@ -31,6 +31,7 @@ include::common-options.asciidoc[] `field_map` fields. For NLP models, use the `input_output` option. For {dfanalytics} models, use the `target_field` and `field_map` option. 
* Each {infer} input field must be single strings, not arrays of strings. +* The `input_field` is processed as is and ignores any <>'s <> at time of {infer} run. ================================================== [discrete] From 18a48c7a79744ab416cf1b4317fa865e4da99d8d Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 11 Sep 2024 16:18:36 -0700 Subject: [PATCH 43/58] Estimate segment field usages (#112760) We have introduced a new memory estimation method in serverless, based on the number of segments and the fields within them. This new approach works well overall, but it still falls short in cases where most fields are used more than once - for example, in both doc_values and postings, or doc_values and points. This change exposes the total usage of fields in segments, allowing us to adjust the memory estimate for these cases. --- .../codec/DeduplicatingFieldInfosFormat.java | 2 +- .../index/codec/FieldInfosWithUsages.java | 49 +++++++++++++++++++ .../elasticsearch/index/shard/IndexShard.java | 14 +++++- .../index/shard/ShardFieldStats.java | 4 +- .../index/shard/IndexShardTests.java | 9 ++++ 5 files changed, 74 insertions(+), 4 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/index/codec/FieldInfosWithUsages.java diff --git a/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java b/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java index 75ec265a68391..67d98bf30d6ec 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java @@ -65,7 +65,7 @@ public FieldInfos read(Directory directory, SegmentInfo segmentInfo, String segm fi.isParentField() ); } - return new FieldInfos(deduplicated); + return new FieldInfosWithUsages(deduplicated); } private static Map internStringStringMap(Map m) { diff --git a/server/src/main/java/org/elasticsearch/index/codec/FieldInfosWithUsages.java b/server/src/main/java/org/elasticsearch/index/codec/FieldInfosWithUsages.java new file mode 100644 index 0000000000000..e0d4a94ee28a1 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/FieldInfosWithUsages.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.index.codec; + +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.IndexOptions; + +public class FieldInfosWithUsages extends FieldInfos { + private final int totalUsages; + + public FieldInfosWithUsages(FieldInfo[] infos) { + super(infos); + this.totalUsages = computeUsages(infos); + } + + public static int computeUsages(FieldInfo[] infos) { + int usages = 0; + for (FieldInfo fi : infos) { + if (fi.getIndexOptions() != IndexOptions.NONE) { + usages++; + } + if (fi.hasNorms()) { + usages++; + } + if (fi.getDocValuesType() != DocValuesType.NONE) { + usages++; + } + if (fi.getPointDimensionCount() > 0) { + usages++; + } + if (fi.getVectorDimension() > 0) { + usages++; + } + } + return usages; + } + + public int getTotalUsages() { + return totalUsages; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 8f1ae42a7475c..b618ca49a3a0b 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -83,6 +83,7 @@ import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy; import org.elasticsearch.index.cache.request.ShardRequestCache; import org.elasticsearch.index.codec.CodecService; +import org.elasticsearch.index.codec.FieldInfosWithUsages; import org.elasticsearch.index.engine.CommitStats; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.Engine.GetResult; @@ -4093,11 +4094,20 @@ public void afterRefresh(boolean didRefresh) { try (var searcher = getEngine().acquireSearcher("shard_field_stats", Engine.SearcherScope.INTERNAL)) { int numSegments = 0; int totalFields = 0; + long usages = 0; for (LeafReaderContext leaf : searcher.getLeafContexts()) { numSegments++; - totalFields += leaf.reader().getFieldInfos().size(); + var fieldInfos = leaf.reader().getFieldInfos(); + totalFields += fieldInfos.size(); + if (fieldInfos instanceof FieldInfosWithUsages ft) { + if (usages != -1) { + usages += ft.getTotalUsages(); + } + } else { + usages = -1; + } } - shardFieldStats = new ShardFieldStats(numSegments, totalFields); + shardFieldStats = new ShardFieldStats(numSegments, totalFields, usages); } catch (AlreadyClosedException ignored) { } diff --git a/server/src/main/java/org/elasticsearch/index/shard/ShardFieldStats.java b/server/src/main/java/org/elasticsearch/index/shard/ShardFieldStats.java index 9c53abb1e95e5..c2122b9f4b0aa 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/ShardFieldStats.java +++ b/server/src/main/java/org/elasticsearch/index/shard/ShardFieldStats.java @@ -14,7 +14,9 @@ * * @param numSegments the number of segments * @param totalFields the total number of fields across the segments + * @param fieldUsages the number of usages for segment-level fields (e.g., doc_values, postings, norms, points) + * -1 if unavailable */ -public record ShardFieldStats(int numSegments, int totalFields) { +public record ShardFieldStats(int numSegments, int totalFields, long fieldUsages) { } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 28497dc4a8a6b..63bd1fd6cdeff 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -1793,6 +1793,7 @@ public void testShardFieldStats() throws IOException { assertNotNull(stats); assertThat(stats.numSegments(), equalTo(0)); assertThat(stats.totalFields(), equalTo(0)); + assertThat(stats.fieldUsages(), equalTo(0L)); // index some documents int numDocs = between(1, 10); for (int i = 0; i < numDocs; i++) { @@ -1809,6 +1810,9 @@ public void testShardFieldStats() throws IOException { assertThat(stats.numSegments(), equalTo(1)); // _id, _source, _version, _primary_term, _seq_no, f1, f1.keyword, f2, f2.keyword, assertThat(stats.totalFields(), equalTo(9)); + // _id(term), _source(0), _version(dv), _primary_term(dv), _seq_no(point,dv), f1(postings,norms), + // f1.keyword(term,dv), f2(postings,norms), f2.keyword(term,dv), + assertThat(stats.fieldUsages(), equalTo(13L)); // don't re-compute on refresh without change if (randomBoolean()) { shard.refresh("test"); @@ -1838,10 +1842,15 @@ public void testShardFieldStats() throws IOException { assertThat(stats.numSegments(), equalTo(2)); // 9 + _id, _source, _version, _primary_term, _seq_no, f1, f1.keyword, f2, f2.keyword, f3, f3.keyword assertThat(stats.totalFields(), equalTo(21)); + // first segment: 13, second segment: 13 + f3(postings,norms) + f3.keyword(term,dv), and __soft_deletes to previous segment + assertThat(stats.fieldUsages(), equalTo(31L)); shard.forceMerge(new ForceMergeRequest().maxNumSegments(1).flush(true)); stats = shard.getShardFieldStats(); assertThat(stats.numSegments(), equalTo(1)); assertThat(stats.totalFields(), equalTo(12)); + // _id(term), _source(0), _version(dv), _primary_term(dv), _seq_no(point,dv), f1(postings,norms), + // f1.keyword(term,dv), f2(postings,norms), f2.keyword(term,dv), f3(postings,norms), f3.keyword(term,dv), __soft_deletes + assertThat(stats.fieldUsages(), equalTo(18L)); closeShards(shard); } From e0044a5ef35352848edb71eda1be7803b14012cf Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Thu, 12 Sep 2024 10:05:23 +1000 Subject: [PATCH 44/58] Use a dedicated test executor in MockTransportService (#112748) Instead of using the generic executor for delayed transport actions, this PR adds a new executor to schedule these actions. It helps avoid sharing executors with the node which may lead to unexpected CI failures due to unsafe future assertion. 
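In outline, the fix gives the mock transport service its own small scaling pool, routes the delayed runnables through it, and tears the pool down when the service closes. A condensed sketch of that pattern follows (pool name and sizing taken from the diff below; illustrative, not a drop-in snippet):

// A dedicated pool so delayed test actions never share the node's generic executor.
EsThreadPoolExecutor testExecutor = EsExecutors.newScaling(
    "mock-transport",                                  // pool name
    0, 4,                                              // min/max threads
    30, TimeUnit.SECONDS,                              // idle keep-alive
    true,                                              // reject tasks after shutdown
    EsExecutors.daemonThreadFactory("mock-transport"),
    threadPool.getThreadContext()
);

// Delayed transport actions are scheduled onto the dedicated pool ...
threadPool.schedule(runnable, delay, testExecutor);

// ... and doClose() waits for it to terminate so no task outlives the test.
assertTrue(ThreadPool.terminate(testExecutor, 10, TimeUnit.SECONDS));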
--- .../test/transport/MockTransportService.java | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index da478cbf1cb26..8e10fd08c9d42 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -29,6 +29,8 @@ import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.MockPageCacheRecycler; import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor; import org.elasticsearch.common.util.concurrent.RunOnce; import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.IOUtils; @@ -81,6 +83,7 @@ import java.util.function.Supplier; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.spy; /** @@ -206,6 +209,7 @@ public static MockTransportService getInstance(String nodeName) { } private final Transport original; + private final EsThreadPoolExecutor testExecutor; /** * Build the service. @@ -302,6 +306,16 @@ private MockTransportService( Tracer.NOOP ); this.original = transport.getDelegate(); + this.testExecutor = EsExecutors.newScaling( + "mock-transport", + 0, + 4, + 30, + TimeUnit.SECONDS, + true, + EsExecutors.daemonThreadFactory("mock-transport"), + threadPool.getThreadContext() + ); } private static TransportAddress[] extractTransportAddresses(TransportService transportService) { @@ -617,7 +631,7 @@ protected void doRun() throws IOException { delay ) ); - threadPool.schedule(runnable, delay, threadPool.generic()); + threadPool.schedule(runnable, delay, testExecutor); } } } @@ -795,6 +809,8 @@ protected void doClose() throws IOException { } } catch (InterruptedException e) { throw new IllegalStateException(e); + } finally { + assertTrue(ThreadPool.terminate(testExecutor, 10, TimeUnit.SECONDS)); } } From 2046b299416e9b590f8f9acf2219251375b0edb7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 12 Sep 2024 10:31:27 +1000 Subject: [PATCH 45/58] Mute org.elasticsearch.repositories.blobstore.testkit.integrity.RepositoryVerifyIntegrityIT testTransportException #112779 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 777f52bb1db39..c60cee65b9163 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -211,6 +211,9 @@ tests: - class: org.elasticsearch.repositories.blobstore.testkit.integrity.RepositoryVerifyIntegrityIT method: testCorruption issue: https://github.com/elastic/elasticsearch/issues/112769 +- class: org.elasticsearch.repositories.blobstore.testkit.integrity.RepositoryVerifyIntegrityIT + method: testTransportException + issue: https://github.com/elastic/elasticsearch/issues/112779 # Examples: # From 9f5b528764f922d5ba8560cb09560539f1a15854 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Thu, 12 Sep 2024 14:27:00 +1000 Subject: [PATCH 46/58] Mute org.elasticsearch.script.StatsSummaryTests testEqualsAndHashCode #112439 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git 
a/muted-tests.yml b/muted-tests.yml index c60cee65b9163..7d170a5e8db2d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -214,6 +214,9 @@ tests: - class: org.elasticsearch.repositories.blobstore.testkit.integrity.RepositoryVerifyIntegrityIT method: testTransportException issue: https://github.com/elastic/elasticsearch/issues/112779 +- class: org.elasticsearch.script.StatsSummaryTests + method: testEqualsAndHashCode + issue: https://github.com/elastic/elasticsearch/issues/112439 # Examples: # From 6fdb78ceafb8d0d8c4b68ca106d972e64b985e27 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Thu, 12 Sep 2024 16:12:43 +1000 Subject: [PATCH 47/58] Do not throw in task enqueued by CancellableRunner (#112780) CancellableThreads#execute can throw runtime exceptions, including cancellation. This does not work with AbstractThrottledTaskRunner, which expects enqueued tasks to _not_ throw. This PR catches any runtime exception from CancellableThreads and hands it back to the original runnable. Resolves: #112779 --- muted-tests.yml | 3 --- .../testkit/integrity/RepositoryIntegrityVerifier.java | 6 +++++- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 7d170a5e8db2d..f7ac0ad55e2e7 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -211,9 +211,6 @@ tests: - class: org.elasticsearch.repositories.blobstore.testkit.integrity.RepositoryVerifyIntegrityIT method: testCorruption issue: https://github.com/elastic/elasticsearch/issues/112769 -- class: org.elasticsearch.repositories.blobstore.testkit.integrity.RepositoryVerifyIntegrityIT - method: testTransportException - issue: https://github.com/elastic/elasticsearch/issues/112779 - class: org.elasticsearch.script.StatsSummaryTests method: testEqualsAndHashCode issue: https://github.com/elastic/elasticsearch/issues/112439 diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryIntegrityVerifier.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryIntegrityVerifier.java index a5c81d18071fc..e10fe2b756f78 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryIntegrityVerifier.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryIntegrityVerifier.java @@ -934,7 +934,11 @@ public void onResponse(Releasable releasable) { if (cancellableThreads.isCancelled()) { runnable.onFailure(new TaskCancelledException("task cancelled")); } else { - cancellableThreads.execute(runnable::run); + try { + cancellableThreads.execute(runnable::run); + } catch (RuntimeException e) { + runnable.onFailure(e); + } } } } From ca30b69e3346daac2761d8a34591ec5a00514513 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Thu, 12 Sep 2024 17:19:06 +1000 Subject: [PATCH 48/58] [Test] Account for auto-repairing for shard gen file (#112778) Since #112337, missing shard gen files are automatically reconstructed based on the existing shard snapshot files. If the list of shard snapshot files is complete, it means the repository is effectively not corrupted. This PR updates the test to account for this situation.
Resolves: #112769 --- muted-tests.yml | 3 --- .../integrity/RepositoryVerifyIntegrityIT.java | 13 ++++++++++--- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index f7ac0ad55e2e7..8560a329f0ab7 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -208,9 +208,6 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/cluster/stats/line_1450} issue: https://github.com/elastic/elasticsearch/issues/112732 -- class: org.elasticsearch.repositories.blobstore.testkit.integrity.RepositoryVerifyIntegrityIT - method: testCorruption - issue: https://github.com/elastic/elasticsearch/issues/112769 - class: org.elasticsearch.script.StatsSummaryTests method: testEqualsAndHashCode issue: https://github.com/elastic/elasticsearch/issues/112439 diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityIT.java b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityIT.java index 4b0e0fdbb0955..5725b065aeb06 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/integrity/RepositoryVerifyIntegrityIT.java @@ -344,16 +344,23 @@ public void testCorruption() throws IOException { ? equalTo(testContext.indexNames().size()) : lessThan(testContext.indexNames().size()) ); - assertThat(anomalies, not(empty())); + // Missing shard generation file is automatically repaired based on the shard snapshot files. + // See also BlobStoreRepository#buildBlobStoreIndexShardSnapshots + final boolean deletedShardGen = corruptedFileType == RepositoryFileType.SHARD_GENERATION && Files.exists(corruptedFile) == false; + assertThat(anomalies, deletedShardGen ? empty() : not(empty())); assertThat(responseObjectPath.evaluate("results.total_anomalies"), greaterThanOrEqualTo(anomalies.size())); - assertEquals("fail", responseObjectPath.evaluate("results.result")); + assertEquals(deletedShardGen ? "pass" : "fail", responseObjectPath.evaluate("results.result")); // remove permitted/expected anomalies to verify that no unexpected ones were seen switch (corruptedFileType) { case SNAPSHOT_INFO -> anomalies.remove("failed to load snapshot info"); case GLOBAL_METADATA -> anomalies.remove("failed to load global metadata"); case INDEX_METADATA -> anomalies.remove("failed to load index metadata"); - case SHARD_GENERATION -> anomalies.remove("failed to load shard generation"); + case SHARD_GENERATION -> { + if (deletedShardGen == false) { + anomalies.remove("failed to load shard generation"); + } + } case SHARD_SNAPSHOT_INFO -> anomalies.remove("failed to load shard snapshot"); case SHARD_DATA -> { anomalies.remove("missing blob"); From 1bbb7391f288be7cd905efed31c0326c0a88efd1 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 12 Sep 2024 08:22:50 +0100 Subject: [PATCH 49/58] Introduce test utils for ingest pipelines (#112733) Replaces the somewhat-awkward API on `ClusterAdminClient` for manipulating ingest pipelines with some test-specific utilities that are easier to use. Relates #107984 in that this change massively reduces the noise that would otherwise result from removing the trappy timeouts in these APIs. 
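As a concrete illustration of the migration applied throughout the diff below: a test that previously built the request by hand and blocked on the future, along these lines (the pipeline id and processor here are illustrative, not taken from any single test),

    clusterAdmin().putPipeline(
        new PutPipelineRequest("my_pipeline", new BytesArray("""
            {"processors": [{"set": {"field": "y", "value": 0}}]}"""), XContentType.JSON)
    ).actionGet();

now calls the helpers this change adds to the test base classes (ESIntegTestCase, ESSingleNodeTestCase) and to the new IngestPipelineTestUtils:

    putJsonPipeline("my_pipeline", """
        {"processors": [{"set": {"field": "y", "value": 0}}]}""");
    assertThat(getPipelines("my_pipeline").isFound(), is(true));
    deletePipeline("my_pipeline");

The assertAcked(...) wrappers around pipeline puts disappear throughout the diff, suggesting the helper checks acknowledgement itself; there is also an overload of putJsonPipeline taking a lambda of the form (builder, params) -> builder..., used where tests previously assembled the body with JsonXContent.contentBuilder().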
--- .../IngestFailureStoreMetricsIT.java | 7 +- .../ingest/common/IngestRestartIT.java | 34 +-- .../ingest/common/ManyNestedPipelinesIT.java | 9 +- ...eteringParserDecoratorWithPipelinesIT.java | 7 +- .../geoip/EnterpriseGeoIpDownloaderIT.java | 34 +-- .../ingest/geoip/GeoIpDownloaderIT.java | 162 ++++++------- .../ingest/geoip/GeoIpDownloaderStatsIT.java | 30 +-- .../geoip/GeoIpProcessorNonIngestNodeIT.java | 67 +++--- .../elasticsearch/reindex/CancelTests.java | 15 +- .../action/bulk/BulkIntegrationIT.java | 21 +- .../elasticsearch/index/FinalPipelineIT.java | 64 ++--- .../ingest/IngestAsyncProcessorIT.java | 6 +- .../elasticsearch/ingest/IngestClientIT.java | 179 ++++++-------- .../ingest/IngestFileSettingsIT.java | 5 +- ...gestProcessorNotInstalledOnAllNodesIT.java | 53 +++-- .../ingest/IngestStatsNamesAndTypesIT.java | 6 +- .../SnapshotCustomPluginStateIT.java | 20 +- .../ingest/DeletePipelineRequestBuilder.java | 29 --- .../ingest/GetPipelineRequestBuilder.java | 23 -- .../ingest/PutPipelineRequestBuilder.java | 22 -- .../action/ingest/ReservedPipelineAction.java | 2 +- .../client/internal/ClusterAdminClient.java | 42 ---- .../elasticsearch/ingest/IngestService.java | 9 +- .../ingest/RestDeletePipelineAction.java | 3 +- .../action/ingest/RestGetPipelineAction.java | 7 +- .../action/ingest/RestPutPipelineAction.java | 3 +- .../ingest/PutPipelineRequestTests.java | 6 +- .../ingest/IngestServiceTests.java | 221 +++++------------- .../ingest/IngestPipelineTestUtils.java | 121 ++++++++++ .../elasticsearch/test/ESIntegTestCase.java | 52 +++++ .../test/ESSingleNodeTestCase.java | 32 +++ .../xpack/enrich/EnrichMultiNodeIT.java | 9 +- .../enrich/EnrichPolicyReindexPipeline.java | 4 +- .../xpack/enrich/BasicEnrichTests.java | 43 ++-- .../xpack/enrich/EnrichPolicyUpdateTests.java | 9 +- .../xpack/enrich/EnrichResiliencyTests.java | 46 +--- .../ml/integration/TestFeatureResetIT.java | 24 +- .../license/MachineLearningLicensingIT.java | 41 +--- .../TestFeatureLicenseTrackingIT.java | 25 +- 39 files changed, 606 insertions(+), 886 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java delete mode 100644 server/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequestBuilder.java delete mode 100644 server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java create mode 100644 test/framework/src/main/java/org/elasticsearch/ingest/IngestPipelineTestUtils.java diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/IngestFailureStoreMetricsIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/IngestFailureStoreMetricsIT.java index 18ba5f4bc1213..66bb06ca4240a 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/IngestFailureStoreMetricsIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/IngestFailureStoreMetricsIT.java @@ -19,14 +19,11 @@ import org.elasticsearch.action.bulk.FailureStoreMetrics; import org.elasticsearch.action.datastreams.CreateDataStreamAction; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Template; 
-import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.core.Strings; import org.elasticsearch.index.mapper.DateFieldMapper; @@ -319,9 +316,7 @@ private void createReroutePipeline(String destination) { } private void createPipeline(String processor) { - String pipelineDefinition = Strings.format("{\"processors\": [{%s}]}", processor); - BytesReference bytes = new BytesArray(pipelineDefinition); - clusterAdmin().putPipeline(new PutPipelineRequest(pipeline, bytes, XContentType.JSON)).actionGet(); + putJsonPipeline(pipeline, Strings.format("{\"processors\": [{%s}]}", processor)); } private void indexDocs(String dataStream, int numDocs, String pipeline) { diff --git a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/ingest/common/IngestRestartIT.java b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/ingest/common/IngestRestartIT.java index f1c592e6e8345..4a0a55dce9483 100644 --- a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/ingest/common/IngestRestartIT.java +++ b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/ingest/common/IngestRestartIT.java @@ -15,14 +15,11 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Requests; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.node.DiscoveryNodeRole; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.Strings; @@ -79,7 +76,7 @@ public void testFailureInConditionalProcessor() { internalCluster().ensureAtLeastNumDataNodes(1); internalCluster().startMasterOnlyNode(); final String pipelineId = "foo"; - clusterAdmin().preparePutPipeline(pipelineId, new BytesArray(Strings.format(""" + putJsonPipeline(pipelineId, Strings.format(""" { "processors": [ { @@ -99,7 +96,7 @@ public void testFailureInConditionalProcessor() { } } ] - }""", MockScriptEngine.NAME)), XContentType.JSON).get(); + }""", MockScriptEngine.NAME)); Exception e = expectThrows( Exception.class, @@ -126,22 +123,16 @@ public void testScriptDisabled() throws Exception { String pipelineIdWithScript = pipelineIdWithoutScript + "_script"; internalCluster().startNode(); - BytesReference pipelineWithScript = new BytesArray(Strings.format(""" + putJsonPipeline(pipelineIdWithScript, Strings.format(""" { "processors": [ { "script": { "lang": "%s", "source": "my_script" } } ] }""", MockScriptEngine.NAME)); - BytesReference pipelineWithoutScript = new BytesArray(""" + putJsonPipeline(pipelineIdWithoutScript, """ { "processors": [ { "set": { "field": "y", "value": 0 } } ] }"""); - Consumer checkPipelineExists = (id) -> assertThat( - clusterAdmin().prepareGetPipeline(id).get().pipelines().get(0).getId(), - equalTo(id) - ); - - clusterAdmin().preparePutPipeline(pipelineIdWithScript, pipelineWithScript, XContentType.JSON).get(); - clusterAdmin().preparePutPipeline(pipelineIdWithoutScript, pipelineWithoutScript, 
XContentType.JSON).get(); + Consumer checkPipelineExists = (id) -> assertThat(getPipelines(id).pipelines().get(0).getId(), equalTo(id)); checkPipelineExists.accept(pipelineIdWithScript); checkPipelineExists.accept(pipelineIdWithoutScript); @@ -197,14 +188,13 @@ public void testPipelineWithScriptProcessorThatHasStoredScript() throws Exceptio putJsonStoredScript("1", Strings.format(""" {"script": {"lang": "%s", "source": "my_script"} } """, MockScriptEngine.NAME)); - BytesReference pipeline = new BytesArray(""" + putJsonPipeline("_id", """ { "processors" : [ {"set" : {"field": "y", "value": 0}}, {"script" : {"id": "1"}} ] }"""); - clusterAdmin().preparePutPipeline("_id", pipeline, XContentType.JSON).get(); prepareIndex("index").setId("1").setSource("x", 0).setPipeline("_id").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); @@ -232,13 +222,12 @@ public void testWithDedicatedIngestNode() throws Exception { String node = internalCluster().startNode(); String ingestNode = internalCluster().startNode(onlyRole(DiscoveryNodeRole.INGEST_ROLE)); - BytesReference pipeline = new BytesArray(""" + putJsonPipeline("_id", """ { "processors" : [ {"set" : {"field": "y", "value": 0}} ] }"""); - clusterAdmin().preparePutPipeline("_id", pipeline, XContentType.JSON).get(); prepareIndex("index").setId("1").setSource("x", 0).setPipeline("_id").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); @@ -264,7 +253,7 @@ public void testWithDedicatedIngestNode() throws Exception { public void testDefaultPipelineWaitForClusterStateRecovered() throws Exception { internalCluster().startNode(); - final var pipeline = new BytesArray(""" + putJsonPipeline("test_pipeline", """ { "processors" : [ { @@ -275,8 +264,8 @@ public void testDefaultPipelineWaitForClusterStateRecovered() throws Exception { } ] }"""); + final TimeValue timeout = TimeValue.timeValueSeconds(10); - client().admin().cluster().preparePutPipeline("test_pipeline", pipeline, XContentType.JSON).get(timeout); client().admin().indices().preparePutTemplate("pipeline_template").setPatterns(Collections.singletonList("*")).setSettings(""" { "index" : { @@ -357,16 +346,13 @@ public void testForwardBulkWithSystemWritePoolDisabled() throws Exception { // Create Bulk Request createIndex("index"); - BytesReference source = new BytesArray(""" + putJsonPipeline("_id", """ { "processors" : [ {"set" : {"field": "y", "value": 0}} ] }"""); - PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id", source, XContentType.JSON); - clusterAdmin().putPipeline(putPipelineRequest).get(); - int numRequests = scaledRandomIntBetween(32, 128); BulkRequest bulkRequest = new BulkRequest(); BulkResponse response; diff --git a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/ingest/common/ManyNestedPipelinesIT.java b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/ingest/common/ManyNestedPipelinesIT.java index c9f3f023b43ef..2c9ea27805a18 100644 --- a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/ingest/common/ManyNestedPipelinesIT.java +++ b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/ingest/common/ManyNestedPipelinesIT.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.ingest.SimulateDocumentVerboseResult; import org.elasticsearch.action.ingest.SimulatePipelineResponse; import org.elasticsearch.action.ingest.SimulateProcessorResult; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import 
org.elasticsearch.core.Strings; import org.elasticsearch.ingest.GraphStructureException; @@ -166,7 +165,7 @@ private void createChainedPipelines(String prefix, int count) { private void createChainedPipeline(String prefix, int number) { String pipelineId = prefix + "pipeline_" + number; String nextPipelineId = prefix + "pipeline_" + (number + 1); - String pipelineTemplate = """ + putJsonPipeline(pipelineId, Strings.format(""" { "processors": [ { @@ -176,9 +175,7 @@ private void createChainedPipeline(String prefix, int number) { } ] } - """; - String pipeline = Strings.format(pipelineTemplate, nextPipelineId); - clusterAdmin().preparePutPipeline(pipelineId, new BytesArray(pipeline), XContentType.JSON).get(); + """, nextPipelineId)); } private void createLastPipeline(String prefix, int number) { @@ -195,6 +192,6 @@ private void createLastPipeline(String prefix, int number) { ] } """; - clusterAdmin().preparePutPipeline(pipelineId, new BytesArray(pipeline), XContentType.JSON).get(); + putJsonPipeline(pipelineId, pipeline); } } diff --git a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecoratorWithPipelinesIT.java b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecoratorWithPipelinesIT.java index 7f0910ea5cc4d..0b93609b3156e 100644 --- a/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecoratorWithPipelinesIT.java +++ b/modules/ingest-common/src/internalClusterTest/java/org/elasticsearch/plugins/internal/XContentMeteringParserDecoratorWithPipelinesIT.java @@ -10,9 +10,6 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.ingest.PutPipelineRequest; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.ingest.common.IngestCommonPlugin; @@ -21,7 +18,6 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xcontent.FilterXContentParserWrapper; import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentType; import java.io.IOException; import java.util.Collection; @@ -44,7 +40,7 @@ public class XContentMeteringParserDecoratorWithPipelinesIT extends ESIntegTestC public void testDocumentIsReportedWithPipelines() throws Exception { hasWrappedParser = false; // pipeline adding fields, changing destination is not affecting reporting - final BytesReference pipelineBody = new BytesArray(""" + putJsonPipeline("pipeline", """ { "processors": [ { @@ -62,7 +58,6 @@ public void testDocumentIsReportedWithPipelines() throws Exception { ] } """); - clusterAdmin().putPipeline(new PutPipelineRequest("pipeline", pipelineBody, XContentType.JSON)).actionGet(); client().index( new IndexRequest(TEST_INDEX_NAME).setPipeline("pipeline") diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloaderIT.java index cc757c413713d..15e7299dc104f 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloaderIT.java +++ 
b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/EnterpriseGeoIpDownloaderIT.java @@ -19,10 +19,8 @@ import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; @@ -36,9 +34,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.transport.RemoteTransportException; -import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xcontent.json.JsonXContent; import org.junit.ClassRule; import java.io.IOException; @@ -47,7 +43,6 @@ import static org.elasticsearch.ingest.EnterpriseGeoIpTask.ENTERPRISE_GEOIP_DOWNLOADER; import static org.elasticsearch.ingest.geoip.EnterpriseGeoIpDownloaderTaskExecutor.MAXMIND_LICENSE_KEY_SETTING; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; public class EnterpriseGeoIpDownloaderIT extends ESIntegTestCase { @@ -155,31 +150,24 @@ private void configureDatabase(String databaseType) throws Exception { } private void createGeoIpPipeline(String pipelineName, String databaseType, String sourceField, String targetField) throws IOException { - final BytesReference bytes; - try (XContentBuilder builder = JsonXContent.contentBuilder()) { - builder.startObject(); + putJsonPipeline(pipelineName, (builder, params) -> { + builder.field("description", "test"); + builder.startArray("processors"); { - builder.field("description", "test"); - builder.startArray("processors"); + builder.startObject(); { - builder.startObject(); + builder.startObject("geoip"); { - builder.startObject("geoip"); - { - builder.field("field", sourceField); - builder.field("target_field", targetField); - builder.field("database_file", databaseType + ".mmdb"); - } - builder.endObject(); + builder.field("field", sourceField); + builder.field("target_field", targetField); + builder.field("database_file", databaseType + ".mmdb"); } builder.endObject(); } - builder.endArray(); + builder.endObject(); } - builder.endObject(); - bytes = BytesReference.bytes(builder); - } - assertAcked(clusterAdmin().putPipeline(new PutPipelineRequest(pipelineName, bytes, XContentType.JSON)).actionGet()); + return builder.endArray(); + }); } private String ingestDocument(String indexName, String pipelineName, String sourceField) { diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java index d994bd70eb7a0..41d711be2dee9 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java @@ -530,91 +530,84 @@ private void putGeoIpPipeline(String pipelineId) throws IOException { * @throws IOException */ private void putGeoIpPipeline(String pipelineId, boolean downloadDatabaseOnPipelineCreation) throws 
IOException { - BytesReference bytes; - try (XContentBuilder builder = JsonXContent.contentBuilder()) { - builder.startObject(); + putJsonPipeline(pipelineId, ((builder, params) -> { + builder.startArray("processors"); { - builder.startArray("processors"); + /* + * First we add a non-geo pipeline with a random field value. This is purely here so that each call to this method + * creates a pipeline that is unique. Creating the a pipeline twice with the same ID and exact same bytes + * results in a no-op, meaning that the pipeline won't actually be updated and won't actually trigger all of the + * things we expect it to. + */ + builder.startObject(); { - /* - * First we add a non-geo pipeline with a random field value. This is purely here so that each call to this method - * creates a pipeline that is unique. Creating the a pipeline twice with the same ID and exact same bytes - * results in a no-op, meaning that the pipeline won't actually be updated and won't actually trigger all of the - * things we expect it to. - */ - builder.startObject(); + builder.startObject(NonGeoProcessorsPlugin.NON_GEO_PROCESSOR_TYPE); { - builder.startObject(NonGeoProcessorsPlugin.NON_GEO_PROCESSOR_TYPE); - { - builder.field("randomField", randomAlphaOfLength(20)); - } - builder.endObject(); + builder.field("randomField", randomAlphaOfLength(20)); } builder.endObject(); + } + builder.endObject(); - builder.startObject(); + builder.startObject(); + { + builder.startObject("geoip"); { - builder.startObject("geoip"); - { - builder.field("field", "ip"); - builder.field("target_field", "ip-city"); - builder.field("database_file", "GeoLite2-City.mmdb"); - if (downloadDatabaseOnPipelineCreation == false || randomBoolean()) { - builder.field("download_database_on_pipeline_creation", downloadDatabaseOnPipelineCreation); - } + builder.field("field", "ip"); + builder.field("target_field", "ip-city"); + builder.field("database_file", "GeoLite2-City.mmdb"); + if (downloadDatabaseOnPipelineCreation == false || randomBoolean()) { + builder.field("download_database_on_pipeline_creation", downloadDatabaseOnPipelineCreation); } - builder.endObject(); } builder.endObject(); - builder.startObject(); + } + builder.endObject(); + builder.startObject(); + { + builder.startObject("geoip"); { - builder.startObject("geoip"); - { - builder.field("field", "ip"); - builder.field("target_field", "ip-country"); - builder.field("database_file", "GeoLite2-Country.mmdb"); - if (downloadDatabaseOnPipelineCreation == false || randomBoolean()) { - builder.field("download_database_on_pipeline_creation", downloadDatabaseOnPipelineCreation); - } + builder.field("field", "ip"); + builder.field("target_field", "ip-country"); + builder.field("database_file", "GeoLite2-Country.mmdb"); + if (downloadDatabaseOnPipelineCreation == false || randomBoolean()) { + builder.field("download_database_on_pipeline_creation", downloadDatabaseOnPipelineCreation); } - builder.endObject(); } builder.endObject(); - builder.startObject(); + } + builder.endObject(); + builder.startObject(); + { + builder.startObject("geoip"); { - builder.startObject("geoip"); - { - builder.field("field", "ip"); - builder.field("target_field", "ip-asn"); - builder.field("database_file", "GeoLite2-ASN.mmdb"); - if (downloadDatabaseOnPipelineCreation == false || randomBoolean()) { - builder.field("download_database_on_pipeline_creation", downloadDatabaseOnPipelineCreation); - } + builder.field("field", "ip"); + builder.field("target_field", "ip-asn"); + builder.field("database_file", 
"GeoLite2-ASN.mmdb"); + if (downloadDatabaseOnPipelineCreation == false || randomBoolean()) { + builder.field("download_database_on_pipeline_creation", downloadDatabaseOnPipelineCreation); } - builder.endObject(); } builder.endObject(); - builder.startObject(); + } + builder.endObject(); + builder.startObject(); + { + builder.startObject("geoip"); { - builder.startObject("geoip"); - { - builder.field("field", "ip"); - builder.field("target_field", "ip-city"); - builder.field("database_file", "MyCustomGeoLite2-City.mmdb"); - if (downloadDatabaseOnPipelineCreation == false || randomBoolean()) { - builder.field("download_database_on_pipeline_creation", downloadDatabaseOnPipelineCreation); - } + builder.field("field", "ip"); + builder.field("target_field", "ip-city"); + builder.field("database_file", "MyCustomGeoLite2-City.mmdb"); + if (downloadDatabaseOnPipelineCreation == false || randomBoolean()) { + builder.field("download_database_on_pipeline_creation", downloadDatabaseOnPipelineCreation); } - builder.endObject(); } builder.endObject(); } - builder.endArray(); + builder.endObject(); } - builder.endObject(); - bytes = BytesReference.bytes(builder); - } - assertAcked(clusterAdmin().preparePutPipeline(pipelineId, bytes, XContentType.JSON).get()); + return builder.endArray(); + })); } /** @@ -626,40 +619,33 @@ private void putNonGeoipPipeline(String pipelineId) throws IOException { * Adding the exact same pipeline twice is treated as a no-op. The random values that go into randomField make each pipeline * created by this method is unique to avoid this. */ - BytesReference bytes; - try (XContentBuilder builder = JsonXContent.contentBuilder()) { - builder.startObject(); + putJsonPipeline(pipelineId, ((builder, params) -> { + builder.startArray("processors"); { - builder.startArray("processors"); + builder.startObject(); { - builder.startObject(); - { - builder.startObject(NonGeoProcessorsPlugin.NON_GEO_PROCESSOR_TYPE); - builder.field("randomField", randomAlphaOfLength(20)); - builder.endObject(); - } + builder.startObject(NonGeoProcessorsPlugin.NON_GEO_PROCESSOR_TYPE); + builder.field("randomField", randomAlphaOfLength(20)); builder.endObject(); - builder.startObject(); - { - builder.startObject(NonGeoProcessorsPlugin.NON_GEO_PROCESSOR_TYPE); - builder.field("randomField", randomAlphaOfLength(20)); - builder.endObject(); - } + } + builder.endObject(); + builder.startObject(); + { + builder.startObject(NonGeoProcessorsPlugin.NON_GEO_PROCESSOR_TYPE); + builder.field("randomField", randomAlphaOfLength(20)); builder.endObject(); - builder.startObject(); - { - builder.startObject(NonGeoProcessorsPlugin.NON_GEO_PROCESSOR_TYPE); - builder.field("randomField", randomAlphaOfLength(20)); - builder.endObject(); - } + } + builder.endObject(); + builder.startObject(); + { + builder.startObject(NonGeoProcessorsPlugin.NON_GEO_PROCESSOR_TYPE); + builder.field("randomField", randomAlphaOfLength(20)); builder.endObject(); } - builder.endArray(); + builder.endObject(); } - builder.endObject(); - bytes = BytesReference.bytes(builder); - } - assertAcked(clusterAdmin().preparePutPipeline(pipelineId, bytes, XContentType.JSON).get()); + return builder.endArray(); + })); } private List getGeoIpTmpDirs() throws IOException { diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderStatsIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderStatsIT.java index ec54317e144d1..51ad7cedba98a 100644 --- 
a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderStatsIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderStatsIT.java @@ -19,8 +19,6 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xcontent.json.JsonXContent; import org.junit.After; import java.io.IOException; @@ -29,7 +27,6 @@ import java.util.Map; import java.util.stream.Collectors; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; @@ -98,30 +95,23 @@ public void testStats() throws Exception { } private void putPipeline() throws IOException { - BytesReference bytes; - try (XContentBuilder builder = JsonXContent.contentBuilder()) { - builder.startObject(); + putJsonPipeline("_id", (builder, params) -> { + builder.startArray("processors"); { - builder.startArray("processors"); + builder.startObject(); { - builder.startObject(); + builder.startObject("geoip"); { - builder.startObject("geoip"); - { - builder.field("field", "ip"); - builder.field("target_field", "ip-city"); - builder.field("database_file", "GeoLite2-City.mmdb"); - } - builder.endObject(); + builder.field("field", "ip"); + builder.field("target_field", "ip-city"); + builder.field("database_file", "GeoLite2-City.mmdb"); } builder.endObject(); } - builder.endArray(); + builder.endObject(); } - builder.endObject(); - bytes = BytesReference.bytes(builder); - } - assertAcked(clusterAdmin().preparePutPipeline("_id", bytes, XContentType.JSON).get()); + return builder.endArray(); + }); } public static Map convertToMap(ToXContent part) throws IOException { diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java index f34f647a01e05..58fdc81b72ae6 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java @@ -11,22 +11,16 @@ import org.apache.lucene.util.Constants; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.ingest.PutPipelineRequest; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.IngestService; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.NodeRoles; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; import java.util.Arrays; import java.util.Map; import static org.elasticsearch.test.NodeRoles.nonIngestNode; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; public class GeoIpProcessorNonIngestNodeIT extends AbstractGeoIpIT { @@ -43,53 +37,46 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { */ 
public void testLazyLoading() throws IOException { assumeFalse("https://github.com/elastic/elasticsearch/issues/37342", Constants.WINDOWS); - final BytesReference bytes; - try (XContentBuilder builder = JsonXContent.contentBuilder()) { - builder.startObject(); + putJsonPipeline("geoip", (builder, params) -> { + builder.field("description", "test"); + builder.startArray("processors"); { - builder.field("description", "test"); - builder.startArray("processors"); + builder.startObject(); { - builder.startObject(); + builder.startObject("geoip"); { - builder.startObject("geoip"); - { - builder.field("field", "ip"); - builder.field("target_field", "ip-city"); - builder.field("database_file", "GeoLite2-City.mmdb"); - } - builder.endObject(); + builder.field("field", "ip"); + builder.field("target_field", "ip-city"); + builder.field("database_file", "GeoLite2-City.mmdb"); } builder.endObject(); - builder.startObject(); + } + builder.endObject(); + builder.startObject(); + { + builder.startObject("geoip"); { - builder.startObject("geoip"); - { - builder.field("field", "ip"); - builder.field("target_field", "ip-country"); - builder.field("database_file", "GeoLite2-Country.mmdb"); - } - builder.endObject(); + builder.field("field", "ip"); + builder.field("target_field", "ip-country"); + builder.field("database_file", "GeoLite2-Country.mmdb"); } builder.endObject(); - builder.startObject(); + } + builder.endObject(); + builder.startObject(); + { + builder.startObject("geoip"); { - builder.startObject("geoip"); - { - builder.field("field", "ip"); - builder.field("target_field", "ip-asn"); - builder.field("database_file", "GeoLite2-ASN.mmdb"); - } - builder.endObject(); + builder.field("field", "ip"); + builder.field("target_field", "ip-asn"); + builder.field("database_file", "GeoLite2-ASN.mmdb"); } builder.endObject(); } - builder.endArray(); + builder.endObject(); } - builder.endObject(); - bytes = BytesReference.bytes(builder); - } - assertAcked(clusterAdmin().putPipeline(new PutPipelineRequest("geoip", bytes, XContentType.JSON)).actionGet()); + return builder.endArray(); + }); // the geo-IP databases should not be loaded on any nodes as they are all non-ingest nodes Arrays.stream(internalCluster().getNodeNames()).forEach(node -> assertDatabaseLoadStatus(node, false)); diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/CancelTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/CancelTests.java index a2911090ab931..4c914764cdb52 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/CancelTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/CancelTests.java @@ -15,9 +15,6 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; -import org.elasticsearch.action.ingest.DeletePipelineRequest; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.Engine.Operation.Origin; @@ -35,7 +32,6 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.tasks.TaskInfo; -import org.elasticsearch.xcontent.XContentType; import org.hamcrest.Matcher; import org.junit.Before; @@ -47,7 +43,6 @@ import java.util.stream.IntStream; import static 
org.elasticsearch.index.query.QueryBuilders.termQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; @@ -231,14 +226,13 @@ public void testReindexCancel() throws Exception { } public void testUpdateByQueryCancel() throws Exception { - BytesReference pipeline = new BytesArray(""" + putJsonPipeline("set-processed", """ { "description" : "sets processed to true", "processors" : [ { "test" : {} } ] }"""); - assertAcked(clusterAdmin().preparePutPipeline("set-processed", pipeline, XContentType.JSON).get()); testCancel( UpdateByQueryAction.INSTANCE, @@ -250,7 +244,7 @@ public void testUpdateByQueryCancel() throws Exception { equalTo("update-by-query [" + INDEX + "]") ); - assertAcked(clusterAdmin().deletePipeline(new DeletePipelineRequest("set-processed")).get()); + deletePipeline("set-processed"); } public void testDeleteByQueryCancel() throws Exception { @@ -279,14 +273,13 @@ public void testReindexCancelWithWorkers() throws Exception { } public void testUpdateByQueryCancelWithWorkers() throws Exception { - BytesReference pipeline = new BytesArray(""" + putJsonPipeline("set-processed", """ { "description" : "sets processed to true", "processors" : [ { "test" : {} } ] }"""); - assertAcked(clusterAdmin().preparePutPipeline("set-processed", pipeline, XContentType.JSON).get()); testCancel( UpdateByQueryAction.INSTANCE, @@ -298,7 +291,7 @@ public void testUpdateByQueryCancelWithWorkers() throws Exception { equalTo("update-by-query [" + INDEX + "]") ); - assertAcked(clusterAdmin().deletePipeline(new DeletePipelineRequest("set-processed")).get()); + deletePipeline("set-processed"); } public void testDeleteByQueryCancelWithWorkers() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java index 38d5719287292..300ef1691a07d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java @@ -14,15 +14,12 @@ import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.ingest.IngestTestPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import java.io.IOException; @@ -31,7 +28,6 @@ import java.util.Collection; import java.util.Collections; import java.util.Map; -import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -39,7 +35,6 @@ import static org.elasticsearch.action.DocWriteResponse.Result.UPDATED; import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -134,17 +129,11 @@ public void testBulkWithGlobalDefaults() throws Exception { } } - private void createSamplePipeline(String pipelineId) throws IOException, ExecutionException, InterruptedException { - XContentBuilder pipeline = jsonBuilder().startObject() - .startArray("processors") - .startObject() - .startObject("test") - .endObject() - .endObject() - .endArray() - .endObject(); - - assertAcked(clusterAdmin().putPipeline(new PutPipelineRequest(pipelineId, BytesReference.bytes(pipeline), XContentType.JSON))); + private void createSamplePipeline(String pipelineId) throws IOException { + putJsonPipeline( + pipelineId, + (builder, params) -> builder.startArray("processors").startObject().startObject("test").endObject().endObject().endArray() + ); } /** This test ensures that index deletion makes indexing fail quickly, not wait on the index that has disappeared */ diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java index 216d5e25218e3..026bb00d69bbb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java @@ -12,12 +12,8 @@ import org.elasticsearch.action.get.GetRequestBuilder; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.GetPipelineResponse; -import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.ConfigurationUtils; @@ -28,7 +24,6 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.xcontent.XContentType; import org.junit.After; import java.io.IOException; @@ -56,10 +51,9 @@ protected Collection> nodePlugins() { public void cleanUpPipelines() { indicesAdmin().prepareDelete("*").get(); - final GetPipelineResponse response = clusterAdmin().prepareGetPipeline("default_pipeline", "final_pipeline", "request_pipeline") - .get(); + final GetPipelineResponse response = getPipelines("default_pipeline", "final_pipeline", "request_pipeline"); for (final PipelineConfiguration pipeline : response.pipelines()) { - clusterAdmin().deletePipeline(new DeletePipelineRequest(pipeline.getId())).actionGet(); + deletePipeline(pipeline.getId()); } } @@ -67,9 +61,8 @@ public void testFinalPipelineCantChangeDestination() { final Settings settings = Settings.builder().put(IndexSettings.FINAL_PIPELINE.getKey(), "final_pipeline").build(); createIndex("index", settings); - final BytesReference finalPipelineBody = new BytesArray(""" + putJsonPipeline("final_pipeline", """ {"processors": [{"changing_dest": {}}]}"""); - clusterAdmin().putPipeline(new PutPipelineRequest("final_pipeline", finalPipelineBody, 
XContentType.JSON)).actionGet(); final IllegalStateException e = expectThrows( IllegalStateException.class, @@ -87,9 +80,8 @@ public void testFinalPipelineCantRerouteDestination() { final Settings settings = Settings.builder().put(IndexSettings.FINAL_PIPELINE.getKey(), "final_pipeline").build(); createIndex("index", settings); - final BytesReference finalPipelineBody = new BytesArray(""" + putJsonPipeline("final_pipeline", """ {"processors": [{"reroute": {}}]}"""); - clusterAdmin().putPipeline(new PutPipelineRequest("final_pipeline", finalPipelineBody, XContentType.JSON)).actionGet(); final IllegalStateException e = expectThrows( IllegalStateException.class, @@ -110,13 +102,11 @@ public void testFinalPipelineOfOldDestinationIsNotInvoked() { .build(); createIndex("index", settings); - BytesReference defaultPipelineBody = new BytesArray(""" + putJsonPipeline("default_pipeline", """ {"processors": [{"changing_dest": {}}]}"""); - clusterAdmin().putPipeline(new PutPipelineRequest("default_pipeline", defaultPipelineBody, XContentType.JSON)).actionGet(); - BytesReference finalPipelineBody = new BytesArray(""" + putJsonPipeline("final_pipeline", """ {"processors": [{"final": {"exists":"no_such_field"}}]}"""); - clusterAdmin().putPipeline(new PutPipelineRequest("final_pipeline", finalPipelineBody, XContentType.JSON)).actionGet(); DocWriteResponse indexResponse = prepareIndex("index").setId("1") .setSource(Map.of("field", "value")) @@ -136,13 +126,11 @@ public void testFinalPipelineOfNewDestinationIsInvoked() { settings = Settings.builder().put(IndexSettings.FINAL_PIPELINE.getKey(), "final_pipeline").build(); createIndex("target", settings); - BytesReference defaultPipelineBody = new BytesArray(""" + putJsonPipeline("default_pipeline", """ {"processors": [{"changing_dest": {}}]}"""); - clusterAdmin().putPipeline(new PutPipelineRequest("default_pipeline", defaultPipelineBody, XContentType.JSON)).actionGet(); - BytesReference finalPipelineBody = new BytesArray(""" + putJsonPipeline("final_pipeline", """ {"processors": [{"final": {}}]}"""); - clusterAdmin().putPipeline(new PutPipelineRequest("final_pipeline", finalPipelineBody, XContentType.JSON)).actionGet(); DocWriteResponse indexResponse = prepareIndex("index").setId("1") .setSource(Map.of("field", "value")) @@ -162,13 +150,11 @@ public void testDefaultPipelineOfNewDestinationIsNotInvoked() { settings = Settings.builder().put(IndexSettings.DEFAULT_PIPELINE.getKey(), "target_default_pipeline").build(); createIndex("target", settings); - BytesReference defaultPipelineBody = new BytesArray(""" + putJsonPipeline("default_pipeline", """ {"processors": [{"changing_dest": {}}]}"""); - clusterAdmin().putPipeline(new PutPipelineRequest("default_pipeline", defaultPipelineBody, XContentType.JSON)).actionGet(); - BytesReference targetPipeline = new BytesArray(""" + putJsonPipeline("target_default_pipeline", """ {"processors": [{"final": {}}]}"""); - clusterAdmin().putPipeline(new PutPipelineRequest("target_default_pipeline", targetPipeline, XContentType.JSON)).actionGet(); DocWriteResponse indexResponse = prepareIndex("index").setId("1") .setSource(Map.of("field", "value")) @@ -188,13 +174,11 @@ public void testDefaultPipelineOfRerouteDestinationIsInvoked() { settings = Settings.builder().put(IndexSettings.DEFAULT_PIPELINE.getKey(), "target_default_pipeline").build(); createIndex("target", settings); - BytesReference defaultPipelineBody = new BytesArray(""" + putJsonPipeline("default_pipeline", """ {"processors": [{"reroute": {}}]}"""); - 
clusterAdmin().putPipeline(new PutPipelineRequest("default_pipeline", defaultPipelineBody, XContentType.JSON)).actionGet(); - BytesReference targetPipeline = new BytesArray(""" + putJsonPipeline("target_default_pipeline", """ {"processors": [{"final": {}}]}"""); - clusterAdmin().putPipeline(new PutPipelineRequest("target_default_pipeline", targetPipeline, XContentType.JSON)).actionGet(); DocWriteResponse indexResponse = prepareIndex("index").setId("1") .setSource(Map.of("field", "value")) @@ -214,13 +198,11 @@ public void testAvoidIndexingLoop() { settings = Settings.builder().put(IndexSettings.DEFAULT_PIPELINE.getKey(), "target_default_pipeline").build(); createIndex("target", settings); - BytesReference defaultPipelineBody = new BytesArray(""" + putJsonPipeline("default_pipeline", """ {"processors": [{"reroute": {"dest": "target"}}]}"""); - clusterAdmin().putPipeline(new PutPipelineRequest("default_pipeline", defaultPipelineBody, XContentType.JSON)).actionGet(); - BytesReference targetPipeline = new BytesArray(""" + putJsonPipeline("target_default_pipeline", """ {"processors": [{"reroute": {"dest": "index"}}]}"""); - clusterAdmin().putPipeline(new PutPipelineRequest("target_default_pipeline", targetPipeline, XContentType.JSON)).actionGet(); IllegalStateException exception = expectThrows( IllegalStateException.class, @@ -245,12 +227,10 @@ public void testFinalPipeline() { } public void testRequestPipelineAndFinalPipeline() { - final BytesReference requestPipelineBody = new BytesArray(""" + putJsonPipeline("request_pipeline", """ {"processors": [{"request": {}}]}"""); - clusterAdmin().putPipeline(new PutPipelineRequest("request_pipeline", requestPipelineBody, XContentType.JSON)).actionGet(); - final BytesReference finalPipelineBody = new BytesArray(""" + putJsonPipeline("final_pipeline", """ {"processors": [{"final": {"exists":"request"}}]}"""); - clusterAdmin().putPipeline(new PutPipelineRequest("final_pipeline", finalPipelineBody, XContentType.JSON)).actionGet(); final Settings settings = Settings.builder().put(IndexSettings.FINAL_PIPELINE.getKey(), "final_pipeline").build(); createIndex("index", settings); final IndexRequestBuilder index = prepareIndex("index").setId("1"); @@ -270,12 +250,10 @@ public void testRequestPipelineAndFinalPipeline() { } public void testDefaultAndFinalPipeline() { - final BytesReference defaultPipelineBody = new BytesArray(""" + putJsonPipeline("default_pipeline", """ {"processors": [{"default": {}}]}"""); - clusterAdmin().putPipeline(new PutPipelineRequest("default_pipeline", defaultPipelineBody, XContentType.JSON)).actionGet(); - final BytesReference finalPipelineBody = new BytesArray(""" + putJsonPipeline("final_pipeline", """ {"processors": [{"final": {"exists":"default"}}]}"""); - clusterAdmin().putPipeline(new PutPipelineRequest("final_pipeline", finalPipelineBody, XContentType.JSON)).actionGet(); final Settings settings = Settings.builder() .put(IndexSettings.DEFAULT_PIPELINE.getKey(), "default_pipeline") .put(IndexSettings.FINAL_PIPELINE.getKey(), "final_pipeline") @@ -297,12 +275,10 @@ public void testDefaultAndFinalPipeline() { } public void testDefaultAndFinalPipelineFromTemplates() { - final BytesReference defaultPipelineBody = new BytesArray(""" + putJsonPipeline("default_pipeline", """ {"processors": [{"default": {}}]}"""); - clusterAdmin().putPipeline(new PutPipelineRequest("default_pipeline", defaultPipelineBody, XContentType.JSON)).actionGet(); - final BytesReference finalPipelineBody = new BytesArray(""" + putJsonPipeline("final_pipeline", 
""" {"processors": [{"final": {"exists":"default"}}]}"""); - clusterAdmin().putPipeline(new PutPipelineRequest("final_pipeline", finalPipelineBody, XContentType.JSON)).actionGet(); final int lowOrder = randomIntBetween(0, Integer.MAX_VALUE - 1); final int highOrder = randomIntBetween(lowOrder + 1, Integer.MAX_VALUE); final int finalPipelineOrder; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestAsyncProcessorIT.java b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestAsyncProcessorIT.java index 2e515b07b59a0..828c02a2ba89c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestAsyncProcessorIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestAsyncProcessorIT.java @@ -14,10 +14,7 @@ import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.update.UpdateResponse; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -56,8 +53,7 @@ protected Collection> getPlugins() { public void testAsyncProcessorImplementation() { // A pipeline with 2 processors: the test async processor and sync test processor. - BytesReference pipelineBody = new BytesArray("{\"processors\": [{\"test-async\": {}, \"test\": {}}]}"); - clusterAdmin().putPipeline(new PutPipelineRequest("_id", pipelineBody, XContentType.JSON)).actionGet(); + putJsonPipeline("_id", "{\"processors\": [{\"test-async\": {}, \"test\": {}}]}"); BulkRequest bulkRequest = new BulkRequest(); int numDocs = randomIntBetween(8, 256); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestClientIT.java b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestClientIT.java index 9fd7aaabaf2f5..4b26240d81652 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestClientIT.java @@ -16,13 +16,12 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.GetPipelineResponse; import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.PutPipelineTransportAction; import org.elasticsearch.action.ingest.SimulateDocumentBaseResult; import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.ingest.SimulatePipelineResponse; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.internal.Requests; import org.elasticsearch.common.bytes.BytesReference; @@ -30,7 +29,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import java.util.Collection; @@ -38,6 +36,7 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.ingest.IngestPipelineTestUtils.putJsonPipelineRequest; 
import static org.elasticsearch.test.NodeRoles.nonIngestNode; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; @@ -63,19 +62,17 @@ protected Collection> nodePlugins() { } public void testSimulate() throws Exception { - BytesReference pipelineSource = BytesReference.bytes( - jsonBuilder().startObject() - .field("description", "my_pipeline") + putJsonPipeline( + "_id", + (builder, params) -> builder.field("description", "my_pipeline") .startArray("processors") .startObject() .startObject("test") .endObject() .endObject() .endArray() - .endObject() ); - clusterAdmin().preparePutPipeline("_id", pipelineSource, XContentType.JSON).get(); - GetPipelineResponse getResponse = clusterAdmin().prepareGetPipeline("_id").get(); + GetPipelineResponse getResponse = getPipelines("_id"); assertThat(getResponse.isFound(), is(true)); assertThat(getResponse.pipelines().size(), equalTo(1)); assertThat(getResponse.pipelines().get(0).getId(), equalTo("_id")); @@ -118,26 +115,22 @@ public void testSimulate() throws Exception { assertThat(simulateDocumentBaseResult.getFailure(), nullValue()); // cleanup - AcknowledgedResponse deletePipelineResponse = clusterAdmin().prepareDeletePipeline("_id").get(); - assertTrue(deletePipelineResponse.isAcknowledged()); + deletePipeline("_id"); } public void testBulkWithIngestFailures() throws Exception { createIndex("index"); - BytesReference source = BytesReference.bytes( - jsonBuilder().startObject() - .field("description", "my_pipeline") + putJsonPipeline( + "_id", + (builder, params) -> builder.field("description", "my_pipeline") .startArray("processors") .startObject() .startObject("test") .endObject() .endObject() .endArray() - .endObject() ); - PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id", source, XContentType.JSON); - clusterAdmin().putPipeline(putPipelineRequest).get(); int numRequests = scaledRandomIntBetween(32, 128); BulkRequest bulkRequest = new BulkRequest(); @@ -169,26 +162,22 @@ public void testBulkWithIngestFailures() throws Exception { } // cleanup - AcknowledgedResponse deletePipelineResponse = clusterAdmin().prepareDeletePipeline("_id").get(); - assertTrue(deletePipelineResponse.isAcknowledged()); + deletePipeline("_id"); } public void testBulkWithUpsert() throws Exception { createIndex("index"); - BytesReference source = BytesReference.bytes( - jsonBuilder().startObject() - .field("description", "my_pipeline") + putJsonPipeline( + "_id", + (builder, params) -> builder.field("description", "my_pipeline") .startArray("processors") .startObject() .startObject("test") .endObject() .endObject() .endArray() - .endObject() ); - PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id", source, XContentType.JSON); - clusterAdmin().putPipeline(putPipelineRequest).get(); BulkRequest bulkRequest = new BulkRequest(); IndexRequest indexRequest = new IndexRequest("index").id("1").setPipeline("_id"); @@ -211,21 +200,18 @@ public void testBulkWithUpsert() throws Exception { } public void test() throws Exception { - BytesReference source = BytesReference.bytes( - jsonBuilder().startObject() - .field("description", "my_pipeline") + putJsonPipeline( + "_id", + (builder, params) -> builder.field("description", "my_pipeline") .startArray("processors") .startObject() .startObject("test") .endObject() .endObject() .endArray() - .endObject() ); - PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id", source, XContentType.JSON); - 
clusterAdmin().putPipeline(putPipelineRequest).get(); - GetPipelineResponse getResponse = clusterAdmin().prepareGetPipeline("_id").get(); + GetPipelineResponse getResponse = getPipelines("_id"); assertThat(getResponse.isFound(), is(true)); assertThat(getResponse.pipelines().size(), equalTo(1)); assertThat(getResponse.pipelines().get(0).getId(), equalTo("_id")); @@ -241,11 +227,9 @@ public void test() throws Exception { assertThat(doc.get("field"), equalTo("value2")); assertThat(doc.get("processed"), equalTo(true)); - DeletePipelineRequest deletePipelineRequest = new DeletePipelineRequest("_id"); - AcknowledgedResponse response = clusterAdmin().deletePipeline(deletePipelineRequest).get(); - assertThat(response.isAcknowledged(), is(true)); + deletePipeline("_id"); - getResponse = clusterAdmin().prepareGetPipeline("_id").get(); + getResponse = getPipelines("_id"); assertThat(getResponse.isFound(), is(false)); assertThat(getResponse.pipelines().size(), equalTo(0)); } @@ -263,29 +247,29 @@ public void testPutWithPipelineFactoryError() throws Exception { .endArray() .endObject() ); - PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id2", source, XContentType.JSON); - Exception e = expectThrows(ElasticsearchParseException.class, clusterAdmin().putPipeline(putPipelineRequest)); + PutPipelineRequest putPipelineRequest = putJsonPipelineRequest("_id2", source); + Exception e = expectThrows( + ElasticsearchParseException.class, + client().execute(PutPipelineTransportAction.TYPE, putPipelineRequest) + ); assertThat(e.getMessage(), equalTo("processor [test] doesn't support one or more provided configuration parameters [unused]")); - GetPipelineResponse response = clusterAdmin().prepareGetPipeline("_id2").get(); + GetPipelineResponse response = getPipelines("_id2"); assertFalse(response.isFound()); } public void testWithDedicatedMaster() throws Exception { String masterOnlyNode = internalCluster().startMasterOnlyNode(); - BytesReference source = BytesReference.bytes( - jsonBuilder().startObject() - .field("description", "my_pipeline") + putJsonPipeline( + "_id", + (builder, params) -> builder.field("description", "my_pipeline") .startArray("processors") .startObject() .startObject("test") .endObject() .endObject() .endArray() - .endObject() ); - PutPipelineRequest putPipelineRequest = new PutPipelineRequest("_id", source, XContentType.JSON); - clusterAdmin().putPipeline(putPipelineRequest).get(); BulkItemResponse item = client(masterOnlyNode).prepareBulk() .add(prepareIndex("test").setSource("field", "value2", "drop", true).setPipeline("_id")) @@ -296,56 +280,38 @@ public void testWithDedicatedMaster() throws Exception { } public void testPipelineOriginHeader() throws Exception { - { - XContentBuilder source = jsonBuilder().startObject(); + putJsonPipeline("1", (source, params) -> { + source.startArray("processors"); + source.startObject(); { - source.startArray("processors"); - source.startObject(); - { - source.startObject("pipeline"); - source.field("name", "2"); - source.endObject(); - } + source.startObject("pipeline"); + source.field("name", "2"); source.endObject(); - source.endArray(); } source.endObject(); - PutPipelineRequest putPipelineRequest = new PutPipelineRequest("1", BytesReference.bytes(source), XContentType.JSON); - clusterAdmin().putPipeline(putPipelineRequest).get(); - } - { - XContentBuilder source = jsonBuilder().startObject(); + return source.endArray(); + }); + putJsonPipeline("2", (source, params) -> { + source.startArray("processors"); + source.startObject(); { 
- source.startArray("processors"); - source.startObject(); - { - source.startObject("pipeline"); - source.field("name", "3"); - source.endObject(); - } + source.startObject("pipeline"); + source.field("name", "3"); source.endObject(); - source.endArray(); } source.endObject(); - PutPipelineRequest putPipelineRequest = new PutPipelineRequest("2", BytesReference.bytes(source), XContentType.JSON); - clusterAdmin().putPipeline(putPipelineRequest).get(); - } - { - XContentBuilder source = jsonBuilder().startObject(); + return source.endArray(); + }); + putJsonPipeline("3", (source, params) -> { + source.startArray("processors"); + source.startObject(); { - source.startArray("processors"); - source.startObject(); - { - source.startObject("fail"); - source.endObject(); - } + source.startObject("fail"); source.endObject(); - source.endArray(); } source.endObject(); - PutPipelineRequest putPipelineRequest = new PutPipelineRequest("3", BytesReference.bytes(source), XContentType.JSON); - clusterAdmin().putPipeline(putPipelineRequest).get(); - } + return source.endArray(); + }); Exception e = expectThrows(Exception.class, () -> { IndexRequest indexRequest = new IndexRequest("test"); @@ -359,8 +325,7 @@ public void testPipelineOriginHeader() throws Exception { } public void testPipelineProcessorOnFailure() throws Exception { - { - XContentBuilder source = jsonBuilder().startObject(); + putJsonPipeline("1", (source, params) -> { { source.startArray("processors"); source.startObject(); @@ -382,43 +347,29 @@ public void testPipelineProcessorOnFailure() throws Exception { source.endObject(); source.endArray(); } - source.endObject(); - PutPipelineRequest putPipelineRequest = new PutPipelineRequest("1", BytesReference.bytes(source), XContentType.JSON); - clusterAdmin().putPipeline(putPipelineRequest).get(); - } - { - XContentBuilder source = jsonBuilder().startObject(); + return source; + }); + putJsonPipeline("2", (source, params) -> { + source.startArray("processors"); + source.startObject(); { - source.startArray("processors"); - source.startObject(); - { - source.startObject("pipeline"); - source.field("name", "3"); - source.endObject(); - } + source.startObject("pipeline"); + source.field("name", "3"); source.endObject(); - source.endArray(); } source.endObject(); - PutPipelineRequest putPipelineRequest = new PutPipelineRequest("2", BytesReference.bytes(source), XContentType.JSON); - clusterAdmin().putPipeline(putPipelineRequest).get(); - } - { - XContentBuilder source = jsonBuilder().startObject(); + return source.endArray(); + }); + putJsonPipeline("3", (source, params) -> { + source.startArray("processors"); + source.startObject(); { - source.startArray("processors"); - source.startObject(); - { - source.startObject("fail"); - source.endObject(); - } + source.startObject("fail"); source.endObject(); - source.endArray(); } source.endObject(); - PutPipelineRequest putPipelineRequest = new PutPipelineRequest("3", BytesReference.bytes(source), XContentType.JSON); - clusterAdmin().putPipeline(putPipelineRequest).get(); - } + return source.endArray(); + }); prepareIndex("test").setId("1").setSource("{}", XContentType.JSON).setPipeline("1").get(); Map inserted = client().prepareGet("test", "1").get().getSourceAsMap(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestFileSettingsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestFileSettingsIT.java index 0fa1ef1208593..5ec3e18d124e3 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestFileSettingsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestFileSettingsIT.java @@ -19,7 +19,6 @@ import org.elasticsearch.cluster.metadata.ReservedStateHandlerMetadata; import org.elasticsearch.cluster.metadata.ReservedStateMetadata; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.Strings; import org.elasticsearch.core.Tuple; import org.elasticsearch.plugins.Plugin; @@ -41,6 +40,8 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; +import static org.elasticsearch.common.bytes.BytesReference.bytes; +import static org.elasticsearch.ingest.IngestPipelineTestUtils.putJsonPipelineRequest; import static org.elasticsearch.xcontent.XContentType.JSON; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -252,7 +253,7 @@ private PutPipelineRequest sampleRestRequest(String id) throws Exception { var builder = XContentFactory.contentBuilder(JSON) ) { builder.map(parser.map()); - return new PutPipelineRequest(id, BytesReference.bytes(builder), JSON); + return putJsonPipelineRequest(id, bytes(builder)); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java index 5f036681f8492..08c3d690ef472 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestProcessorNotInstalledOnAllNodesIT.java @@ -9,12 +9,13 @@ package org.elasticsearch.ingest; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.ingest.PutPipelineTransportAction; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.node.NodeService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.xcontent.XContentType; import java.io.IOException; import java.util.Arrays; @@ -24,7 +25,6 @@ import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @ESIntegTestCase.ClusterScope(numDataNodes = 0, numClientNodes = 0, scope = ESIntegTestCase.Scope.TEST) @@ -51,7 +51,7 @@ protected Collection> nodePlugins() { return installPlugin ? 
Arrays.asList(IngestTestPlugin.class) : Collections.emptyList(); } - public void testFailPipelineCreation() throws Exception { + public void testFailPipelineCreation() { installPlugin = true; String node1 = internalCluster().startNode(); installPlugin = false; @@ -59,12 +59,22 @@ public void testFailPipelineCreation() throws Exception { ensureStableCluster(2, node1); ensureStableCluster(2, node2); - try { - clusterAdmin().preparePutPipeline("_id", pipelineSource, XContentType.JSON).get(); - fail("exception expected"); - } catch (ElasticsearchParseException e) { - assertThat(e.getMessage(), containsString("Processor type [test] is not installed on node")); - } + assertThat( + asInstanceOf( + ElasticsearchParseException.class, + ExceptionsHelper.unwrapCause( + safeAwaitFailure( + AcknowledgedResponse.class, + l -> client().execute( + PutPipelineTransportAction.TYPE, + IngestPipelineTestUtils.putJsonPipelineRequest("id", pipelineSource), + l + ) + ) + ) + ).getMessage(), + containsString("Processor type [test] is not installed on node") + ); } public void testFailPipelineCreationProcessorNotInstalledOnMasterNode() throws Exception { @@ -72,12 +82,22 @@ public void testFailPipelineCreationProcessorNotInstalledOnMasterNode() throws E installPlugin = true; internalCluster().startNode(); - try { - clusterAdmin().preparePutPipeline("_id", pipelineSource, XContentType.JSON).get(); - fail("exception expected"); - } catch (ElasticsearchParseException e) { - assertThat(e.getMessage(), equalTo("No processor type exists with name [test]")); - } + assertThat( + asInstanceOf( + ElasticsearchParseException.class, + ExceptionsHelper.unwrapCause( + safeAwaitFailure( + AcknowledgedResponse.class, + l -> client().execute( + PutPipelineTransportAction.TYPE, + IngestPipelineTestUtils.putJsonPipelineRequest("id", pipelineSource), + l + ) + ) + ) + ).getMessage(), + equalTo("No processor type exists with name [test]") + ); } // If there is pipeline defined and a node joins that doesn't have the processor installed then @@ -86,8 +106,7 @@ public void testFailStartNode() throws Exception { installPlugin = true; String node1 = internalCluster().startNode(); - AcknowledgedResponse response = clusterAdmin().preparePutPipeline("_id", pipelineSource, XContentType.JSON).get(); - assertThat(response.isAcknowledged(), is(true)); + putJsonPipeline("_id", pipelineSource); Pipeline pipeline = internalCluster().getInstance(NodeService.class, node1).getIngestService().getPipeline("_id"); assertThat(pipeline, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestStatsNamesAndTypesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestStatsNamesAndTypesIT.java index 86e1d2e332f36..63a16eae6e1ec 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestStatsNamesAndTypesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestStatsNamesAndTypesIT.java @@ -14,10 +14,7 @@ import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.MockScriptPlugin; @@ -92,8 +89,7 @@ public void 
testIngestStatsNamesAndTypes() throws IOException { ] } """, MockScriptEngine.NAME, MockScriptEngine.NAME); - BytesReference pipeline1Reference = new BytesArray(pipeline1); - clusterAdmin().putPipeline(new PutPipelineRequest("pipeline1", pipeline1Reference, XContentType.JSON)).actionGet(); + putJsonPipeline("pipeline1", pipeline1); // index a single document through the pipeline BulkRequest bulkRequest = new BulkRequest(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotCustomPluginStateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotCustomPluginStateIT.java index 8f2702099c102..a3f1f0038a03f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotCustomPluginStateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotCustomPluginStateIT.java @@ -18,15 +18,12 @@ import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; import org.elasticsearch.action.admin.cluster.storedscripts.TransportDeleteStoredScriptAction; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; -import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.GetPipelineResponse; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.ingest.IngestTestPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.StoredScriptsIT; import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xcontent.XContentType; import java.util.Arrays; import java.util.Collection; @@ -36,7 +33,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertIndexTemplateExists; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertIndexTemplateMissing; -import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -84,18 +80,16 @@ public void testIncludeGlobalState() throws Exception { if (testPipeline) { logger.info("--> creating test pipeline"); - BytesReference pipelineSource = BytesReference.bytes( - jsonBuilder().startObject() - .field("description", "my_pipeline") + putJsonPipeline( + "barbaz", + (builder, params) -> builder.field("description", "my_pipeline") .startArray("processors") .startObject() .startObject("test") .endObject() .endObject() .endArray() - .endObject() ); - assertAcked(clusterAdmin().preparePutPipeline("barbaz", pipelineSource, XContentType.JSON).get()); } if (testScript) { @@ -144,7 +138,7 @@ public void testIncludeGlobalState() throws Exception { if (testPipeline) { logger.info("--> delete test pipeline"); - assertAcked(clusterAdmin().deletePipeline(new DeletePipelineRequest("barbaz")).get()); + deletePipeline("barbaz"); } if (testScript) { @@ -184,7 +178,7 @@ public void testIncludeGlobalState() throws Exception { if (testPipeline) { logger.info("--> check that pipeline is restored"); - GetPipelineResponse getPipelineResponse = clusterAdmin().prepareGetPipeline("barbaz").get(); + GetPipelineResponse getPipelineResponse = getPipelines("barbaz"); assertTrue(getPipelineResponse.isFound()); } @@ -218,7 +212,7 @@ public void testIncludeGlobalState() throws Exception { cluster().wipeTemplates("test-template"); } if (testPipeline) { - 
assertAcked(clusterAdmin().deletePipeline(new DeletePipelineRequest("barbaz")).get()); + deletePipeline("barbaz"); } if (testScript) { @@ -245,7 +239,7 @@ public void testIncludeGlobalState() throws Exception { logger.info("--> check that global state wasn't restored but index was"); getIndexTemplatesResponse = indicesAdmin().prepareGetTemplates().get(); assertIndexTemplateMissing(getIndexTemplatesResponse, "test-template"); - assertFalse(clusterAdmin().prepareGetPipeline("barbaz").get().isFound()); + assertFalse(getPipelines("barbaz").isFound()); assertNull(safeExecute(GetStoredScriptAction.INSTANCE, new GetStoredScriptRequest(TEST_REQUEST_TIMEOUT, "foobar")).getSource()); assertDocCount("test-idx", 100L); } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java deleted file mode 100644 index ef08f64765f98..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequestBuilder.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.ingest; - -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.client.internal.ElasticsearchClient; - -public class DeletePipelineRequestBuilder extends ActionRequestBuilder { - - public DeletePipelineRequestBuilder(ElasticsearchClient client, String id) { - super(client, DeletePipelineTransportAction.TYPE, new DeletePipelineRequest(id)); - } - - /** - * Sets the id of the pipeline to delete. - */ - public DeletePipelineRequestBuilder setId(String id) { - request.setId(id); - return this; - } - -} diff --git a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequestBuilder.java deleted file mode 100644 index ca873c5aa3843..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineRequestBuilder.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
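All three pipeline request builders (delete, get and put) are removed by this change, along with their ClusterAdminClient entry points further below; callers move to the generic transport-action path instead. A minimal sketch of the replacement call shapes, assuming client is any ElasticsearchClient, source is a JSON pipeline body as a BytesReference, and the listeners are supplied by the caller:

    // Hypothetical call sites: one client.execute(...) per removed builder.
    client.execute(PutPipelineTransportAction.TYPE, new PutPipelineRequest("my-id", source, XContentType.JSON), putListener);
    client.execute(GetPipelineAction.INSTANCE, new GetPipelineRequest("my-id"), getListener);
    client.execute(DeletePipelineTransportAction.TYPE, new DeletePipelineRequest("my-id"), deleteListener);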
- */ - -package org.elasticsearch.action.ingest; - -import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder; -import org.elasticsearch.client.internal.ElasticsearchClient; - -public class GetPipelineRequestBuilder extends MasterNodeReadOperationRequestBuilder< - GetPipelineRequest, - GetPipelineResponse, - GetPipelineRequestBuilder> { - - public GetPipelineRequestBuilder(ElasticsearchClient client, String[] ids) { - super(client, GetPipelineAction.INSTANCE, new GetPipelineRequest(ids)); - } - -} diff --git a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java deleted file mode 100644 index 2fce285d83f06..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequestBuilder.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.ingest; - -import org.elasticsearch.action.ActionRequestBuilder; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.client.internal.ElasticsearchClient; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.xcontent.XContentType; - -public class PutPipelineRequestBuilder extends ActionRequestBuilder { - - public PutPipelineRequestBuilder(ElasticsearchClient client, String id, BytesReference source, XContentType xContentType) { - super(client, PutPipelineTransportAction.TYPE, new PutPipelineRequest(id, source, xContentType)); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/ingest/ReservedPipelineAction.java b/server/src/main/java/org/elasticsearch/action/ingest/ReservedPipelineAction.java index aca9bb81fb53f..ba0c06db968e9 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/ReservedPipelineAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/ReservedPipelineAction.java @@ -100,7 +100,7 @@ public TransformState transform(Object source, TransformState prevState) throws toDelete.removeAll(entities); for (var pipelineToDelete : toDelete) { - var task = new IngestService.DeletePipelineClusterStateUpdateTask(pipelineToDelete); + var task = new IngestService.DeletePipelineClusterStateUpdateTask(null, new DeletePipelineRequest(pipelineToDelete)); state = wrapIngestTaskExecute(task, state); } diff --git a/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java b/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java index 1509e398fbffa..95dd1ccdf86de 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java @@ -94,16 +94,6 @@ import org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequestBuilder; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.action.admin.cluster.stats.TransportClusterStatsAction; -import org.elasticsearch.action.ingest.DeletePipelineRequest; -import org.elasticsearch.action.ingest.DeletePipelineRequestBuilder; -import 
org.elasticsearch.action.ingest.DeletePipelineTransportAction; -import org.elasticsearch.action.ingest.GetPipelineAction; -import org.elasticsearch.action.ingest.GetPipelineRequest; -import org.elasticsearch.action.ingest.GetPipelineRequestBuilder; -import org.elasticsearch.action.ingest.GetPipelineResponse; -import org.elasticsearch.action.ingest.PutPipelineRequest; -import org.elasticsearch.action.ingest.PutPipelineRequestBuilder; -import org.elasticsearch.action.ingest.PutPipelineTransportAction; import org.elasticsearch.action.ingest.SimulatePipelineAction; import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.ingest.SimulatePipelineRequestBuilder; @@ -370,38 +360,6 @@ public SnapshotsStatusRequestBuilder prepareSnapshotStatus(TimeValue masterNodeT return new SnapshotsStatusRequestBuilder(this, masterNodeTimeout); } - public void putPipeline(PutPipelineRequest request, ActionListener listener) { - execute(PutPipelineTransportAction.TYPE, request, listener); - } - - public ActionFuture putPipeline(PutPipelineRequest request) { - return execute(PutPipelineTransportAction.TYPE, request); - } - - public PutPipelineRequestBuilder preparePutPipeline(String id, BytesReference source, XContentType xContentType) { - return new PutPipelineRequestBuilder(this, id, source, xContentType); - } - - public void deletePipeline(DeletePipelineRequest request, ActionListener listener) { - execute(DeletePipelineTransportAction.TYPE, request, listener); - } - - public ActionFuture deletePipeline(DeletePipelineRequest request) { - return execute(DeletePipelineTransportAction.TYPE, request); - } - - public DeletePipelineRequestBuilder prepareDeletePipeline(String id) { - return new DeletePipelineRequestBuilder(this, id); - } - - public void getPipeline(GetPipelineRequest request, ActionListener listener) { - execute(GetPipelineAction.INSTANCE, request, listener); - } - - public GetPipelineRequestBuilder prepareGetPipeline(String... 
ids) { - return new GetPipelineRequestBuilder(this, ids); - } - public void simulatePipeline(SimulatePipelineRequest request, ActionListener listener) { execute(SimulatePipelineAction.INSTANCE, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index 20f97e1871483..c54bdbf71a38a 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -323,18 +323,11 @@ public void delete(DeletePipelineRequest request, ActionListener listener, DeletePipelineRequest request) { + public DeletePipelineClusterStateUpdateTask(ActionListener listener, DeletePipelineRequest request) { super(listener); this.request = request; } - /** - * Used by the {@link org.elasticsearch.action.ingest.ReservedPipelineAction} - */ - public DeletePipelineClusterStateUpdateTask(String id) { - this(null, new DeletePipelineRequest(id)); - } - @Override public IngestMetadata execute(IngestMetadata currentIngestMetadata, Collection allIndexMetadata) { if (currentIngestMetadata == null) { diff --git a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java index da22a211bd58f..2659fed00879e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestDeletePipelineAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.rest.action.ingest; import org.elasticsearch.action.ingest.DeletePipelineRequest; +import org.elasticsearch.action.ingest.DeletePipelineTransportAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -40,6 +41,6 @@ public RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient cl DeletePipelineRequest request = new DeletePipelineRequest(restRequest.param("id")); request.masterNodeTimeout(getMasterNodeTimeout(restRequest)); request.ackTimeout(getAckTimeout(restRequest)); - return channel -> client.admin().cluster().deletePipeline(request, new RestToXContentListener<>(channel)); + return channel -> client.execute(DeletePipelineTransportAction.TYPE, request, new RestToXContentListener<>(channel)); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java index d6712b44f3e03..6f8d8ce926ae6 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestGetPipelineAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.rest.action.ingest; +import org.elasticsearch.action.ingest.GetPipelineAction; import org.elasticsearch.action.ingest.GetPipelineRequest; import org.elasticsearch.action.ingest.GetPipelineResponse; import org.elasticsearch.client.internal.node.NodeClient; @@ -44,6 +45,10 @@ public RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient cl Strings.splitStringByCommaToArray(restRequest.param("id")) ); request.masterNodeTimeout(getMasterNodeTimeout(restRequest)); - return channel -> client.admin().cluster().getPipeline(request, new RestToXContentListener<>(channel, GetPipelineResponse::status)); + return channel -> 
client.execute( + GetPipelineAction.INSTANCE, + request, + new RestToXContentListener<>(channel, GetPipelineResponse::status) + ); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java index 520855b8987cd..e58a6e62d689b 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestPutPipelineAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.rest.action.ingest; import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.PutPipelineTransportAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.Tuple; @@ -58,6 +59,6 @@ public RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient cl PutPipelineRequest request = new PutPipelineRequest(restRequest.param("id"), sourceTuple.v2(), sourceTuple.v1(), ifVersion); request.masterNodeTimeout(getMasterNodeTimeout(restRequest)); request.ackTimeout(getAckTimeout(restRequest)); - return channel -> client.admin().cluster().putPipeline(request, new RestToXContentListener<>(channel)); + return channel -> client.execute(PutPipelineTransportAction.TYPE, request, new RestToXContentListener<>(channel)); } } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/PutPipelineRequestTests.java b/server/src/test/java/org/elasticsearch/action/ingest/PutPipelineRequestTests.java index 576b9fb7224d5..9e021e4ec1b91 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/PutPipelineRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/PutPipelineRequestTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.action.ingest; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; @@ -19,12 +18,13 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; -import java.nio.charset.StandardCharsets; + +import static org.elasticsearch.ingest.IngestPipelineTestUtils.putJsonPipelineRequest; public class PutPipelineRequestTests extends ESTestCase { public void testSerializationWithXContent() throws IOException { - PutPipelineRequest request = new PutPipelineRequest("1", new BytesArray("{}".getBytes(StandardCharsets.UTF_8)), XContentType.JSON); + PutPipelineRequest request = putJsonPipelineRequest("1", "{}"); assertEquals(XContentType.JSON, request.getXContentType()); BytesStreamOutput output = new BytesStreamOutput(); diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index 5c07c2344cf13..635be15ff8990 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -97,6 +97,7 @@ import static org.elasticsearch.cluster.service.ClusterStateTaskExecutorUtils.executeAndAssertSuccessful; import static org.elasticsearch.core.Tuple.tuple; import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException; +import static org.elasticsearch.ingest.IngestPipelineTestUtils.putJsonPipelineRequest; import static org.elasticsearch.ingest.IngestService.NOOP_PIPELINE_NAME; 
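The IngestServiceTests hunks that follow are mechanical: each inline new PutPipelineRequest(id, new BytesArray(json), XContentType.JSON) collapses to putJsonPipelineRequest(id, json) through the static import added above. Both forms build the same request; shown once for reference:

    // Before: BytesArray and content type spelled out at every call site.
    PutPipelineRequest verbose = new PutPipelineRequest("_id", new BytesArray("{\"processors\": []}"), XContentType.JSON);
    // After: the JSON-specific test helper hides the boilerplate.
    PutPipelineRequest concise = putJsonPipelineRequest("_id", "{\"processors\": []}");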
import static org.elasticsearch.ingest.IngestService.hasPipeline; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -422,8 +423,8 @@ public void testDelete() { public void testValidateNoIngestInfo() throws Exception { IngestService ingestService = createWithProcessors(); - PutPipelineRequest putRequest = new PutPipelineRequest("_id", new BytesArray(""" - {"processors": [{"set" : {"field": "_field", "value": "_value"}}]}"""), XContentType.JSON); + PutPipelineRequest putRequest = putJsonPipelineRequest("_id", """ + {"processors": [{"set" : {"field": "_field", "value": "_value"}}]}"""); var pipelineConfig = XContentHelper.convertToMap(putRequest.getSource(), false, putRequest.getXContentType()).v2(); Exception e = expectThrows( @@ -511,7 +512,7 @@ public void testGetProcessorsInPipeline() throws Exception { assertThat(pipeline, nullValue()); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty - PutPipelineRequest putRequest = new PutPipelineRequest("_id", new BytesArray(""" + PutPipelineRequest putRequest = putJsonPipelineRequest("_id", """ { "processors": [ { @@ -528,7 +529,7 @@ public void testGetProcessorsInPipeline() throws Exception { } } ] - }"""), XContentType.JSON); + }"""); ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); @@ -554,7 +555,7 @@ public void testGetPipelineWithProcessorType() throws Exception { ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); ClusterState previousClusterState = clusterState; - PutPipelineRequest putRequest1 = new PutPipelineRequest("_id1", new BytesArray(""" + PutPipelineRequest putRequest1 = putJsonPipelineRequest("_id1", """ { "processors": [ { @@ -571,10 +572,10 @@ public void testGetPipelineWithProcessorType() throws Exception { } } ] - }"""), XContentType.JSON); + }"""); clusterState = executePut(putRequest1, clusterState); - PutPipelineRequest putRequest2 = new PutPipelineRequest("_id2", new BytesArray(""" - {"processors": [{"set" : {"field": "_field", "value": "_value", "tag": "tag2"}}]}"""), XContentType.JSON); + PutPipelineRequest putRequest2 = putJsonPipelineRequest("_id2", """ + {"processors": [{"set" : {"field": "_field", "value": "_value", "tag": "tag2"}}]}"""); clusterState = executePut(putRequest2, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); @@ -611,8 +612,8 @@ public String getType() { ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); ClusterState previousClusterState = clusterState; - PutPipelineRequest putRequest1 = new PutPipelineRequest("_id1", new BytesArray(""" - {"processors": [{"set" : {"field": "_field", "value": "_value", "tag": "tag1"}}]}"""), XContentType.JSON); + PutPipelineRequest putRequest1 = putJsonPipelineRequest("_id1", """ + {"processors": [{"set" : {"field": "_field", "value": "_value", "tag": "tag1"}}]}"""); clusterState = executePut(putRequest1, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); @@ -676,8 +677,8 @@ public void testGetProcessorsInPipelineComplexConditional() throws Exception { assertThat(pipeline, nullValue()); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty - PutPipelineRequest putRequest = new PutPipelineRequest(id, new BytesArray(""" - 
{"processors": [{"complexSet" : {"field": "_field", "value": "_value"}}]}"""), XContentType.JSON); + PutPipelineRequest putRequest = putJsonPipelineRequest(id, """ + {"processors": [{"complexSet" : {"field": "_field", "value": "_value"}}]}"""); ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); @@ -699,8 +700,8 @@ public void testCrud() throws Exception { assertThat(pipeline, nullValue()); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty - PutPipelineRequest putRequest = new PutPipelineRequest(id, new BytesArray(""" - {"processors": [{"set" : {"field": "_field", "value": "_value"}}]}"""), XContentType.JSON); + PutPipelineRequest putRequest = putJsonPipelineRequest(id, """ + {"processors": [{"set" : {"field": "_field", "value": "_value"}}]}"""); ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); @@ -727,7 +728,7 @@ public void testPut() { ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // add a new pipeline: - PutPipelineRequest putRequest = new PutPipelineRequest(id, new BytesArray("{\"processors\": []}"), XContentType.JSON); + PutPipelineRequest putRequest = putJsonPipelineRequest(id, "{\"processors\": []}"); ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); @@ -738,8 +739,8 @@ public void testPut() { assertThat(pipeline.getProcessors().size(), equalTo(0)); // overwrite existing pipeline: - putRequest = new PutPipelineRequest(id, new BytesArray(""" - {"processors": [], "description": "_description"}"""), XContentType.JSON); + putRequest = putJsonPipelineRequest(id, """ + {"processors": [], "description": "_description"}"""); previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); @@ -757,11 +758,7 @@ public void testPutWithErrorResponse() throws IllegalAccessException { assertThat(pipeline, nullValue()); ClusterState previousClusterState = ClusterState.builder(new ClusterName("_name")).build(); - PutPipelineRequest putRequest = new PutPipelineRequest( - id, - new BytesArray("{\"description\": \"empty processors\"}"), - XContentType.JSON - ); + PutPipelineRequest putRequest = putJsonPipelineRequest(id, "{\"description\": \"empty processors\"}"); ClusterState clusterState = executePut(putRequest, previousClusterState); MockLog.assertThatLogger( () -> ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)), @@ -962,7 +959,7 @@ public void testGetPipelines() { public void testValidateProcessorTypeOnAllNodes() throws Exception { IngestService ingestService = createWithProcessors(); - PutPipelineRequest putRequest = new PutPipelineRequest("_id", new BytesArray(""" + PutPipelineRequest putRequest = putJsonPipelineRequest("_id", """ { "processors": [ { @@ -979,7 +976,7 @@ public void testValidateProcessorTypeOnAllNodes() throws Exception { } } ] - }"""), XContentType.JSON); + }"""); var pipelineConfig = XContentHelper.convertToMap(putRequest.getSource(), false, putRequest.getXContentType()).v2(); 
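Each converted unit test below drives the IngestService through the same apply-cluster-state loop; only the request construction changes. The recurring shape, extracted once for reference (executePut is the test class's existing local helper):

    // Recurring IngestServiceTests pattern: register a pipeline, then apply the state change.
    PutPipelineRequest putRequest = putJsonPipelineRequest("_id", "{\"processors\": [{\"mock\" : {}}]}");
    ClusterState previousClusterState = clusterState;
    clusterState = executePut(putRequest, clusterState);
    ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState));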
DiscoveryNode node1 = DiscoveryNodeUtils.create("_node_id1", buildNewFakeTransportAddress(), Map.of(), Set.of()); @@ -1006,7 +1003,7 @@ public void testValidateConfigurationExceptions() { // ordinary validation issues happen at processor construction time throw newConfigurationException("fail_validation", tag, "no_property_name", "validation failure reason"); })); - PutPipelineRequest putRequest = new PutPipelineRequest("_id", new BytesArray(""" + PutPipelineRequest putRequest = putJsonPipelineRequest("_id", """ { "processors": [ { @@ -1014,7 +1011,7 @@ public void testValidateConfigurationExceptions() { } } ] - }"""), XContentType.JSON); + }"""); var pipelineConfig = XContentHelper.convertToMap(putRequest.getSource(), false, putRequest.getXContentType()).v2(); // other validation actually consults this map, but this validation does not. however, it must not be empty. @@ -1040,7 +1037,7 @@ public void extraValidation() throws Exception { } }; })); - PutPipelineRequest putRequest = new PutPipelineRequest("_id", new BytesArray(""" + PutPipelineRequest putRequest = putJsonPipelineRequest("_id", """ { "processors": [ { @@ -1048,7 +1045,7 @@ public void extraValidation() throws Exception { } } ] - }"""), XContentType.JSON); + }"""); var pipelineConfig = XContentHelper.convertToMap(putRequest.getSource(), false, putRequest.getXContentType()).v2(); // other validation actually consults this map, but this validation does not. however, it must not be empty. @@ -1080,11 +1077,7 @@ public String getType() { ); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty String id = "_id"; - PutPipelineRequest putRequest = new PutPipelineRequest( - id, - new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest = putJsonPipelineRequest(id, "{\"processors\": [{\"mock\" : {}}]}"); ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); @@ -1129,11 +1122,7 @@ public void testExecuteBulkPipelineDoesNotExist() { Map.of("mock", (factories, tag, description, config) -> mockCompoundProcessor()) ); - PutPipelineRequest putRequest = new PutPipelineRequest( - "_id", - new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest = putJsonPipelineRequest("_id", "{\"processors\": [{\"mock\" : {}}]}"); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); @@ -1201,11 +1190,7 @@ public XContentParser decorate(XContentParser xContentParser) { documentParsingProvider ); - PutPipelineRequest putRequest = new PutPipelineRequest( - "_id", - new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest = putJsonPipelineRequest("_id", "{\"processors\": [{\"mock\" : {}}]}"); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); @@ -1239,11 +1224,7 @@ public void testExecuteSuccess() { IngestService ingestService = createWithProcessors( Map.of("mock", (factories, tag, description, config) -> mockCompoundProcessor()) ); - PutPipelineRequest putRequest = new PutPipelineRequest( - "_id", - new 
BytesArray("{\"processors\": [{\"mock\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest = putJsonPipelineRequest("_id", "{\"processors\": [{\"mock\" : {}}]}"); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); @@ -1282,11 +1263,7 @@ public void testDynamicTemplates() throws Exception { ) ) ); - PutPipelineRequest putRequest = new PutPipelineRequest( - "_id", - new BytesArray("{\"processors\": [{\"set\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest = putJsonPipelineRequest("_id", "{\"processors\": [{\"set\" : {}}]}"); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); @@ -1314,8 +1291,8 @@ public void testDynamicTemplates() throws Exception { public void testExecuteEmptyPipeline() throws Exception { IngestService ingestService = createWithProcessors(Map.of()); - PutPipelineRequest putRequest = new PutPipelineRequest("_id", new BytesArray(""" - {"processors": [], "description": "_description"}"""), XContentType.JSON); + PutPipelineRequest putRequest = putJsonPipelineRequest("_id", """ + {"processors": [], "description": "_description"}"""); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); @@ -1345,11 +1322,7 @@ public void testExecuteEmptyPipeline() throws Exception { public void testExecutePropagateAllMetadataUpdates() throws Exception { final CompoundProcessor processor = mockCompoundProcessor(); IngestService ingestService = createWithProcessors(Map.of("mock", (factories, tag, description, config) -> processor)); - PutPipelineRequest putRequest = new PutPipelineRequest( - "_id", - new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest = putJsonPipelineRequest("_id", "{\"processors\": [{\"mock\" : {}}]}"); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); @@ -1423,17 +1396,9 @@ public void testExecuteFailure() throws Exception { (factories, tag, description, config) -> new FakeProcessor("set", "", "", (ingestDocument) -> fail()) ) ); - PutPipelineRequest putRequest1 = new PutPipelineRequest( - "_id1", - new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest1 = putJsonPipelineRequest("_id1", "{\"processors\": [{\"mock\" : {}}]}"); // given that set -> fail() above, it's a failure if a document executes against this pipeline - PutPipelineRequest putRequest2 = new PutPipelineRequest( - "_id2", - new BytesArray("{\"processors\": [{\"set\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest2 = putJsonPipelineRequest("_id2", "{\"processors\": [{\"set\" : {}}]}"); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest1, clusterState); @@ -1490,11 +1455,7 @@ public void testExecuteSuccessWithOnFailure() throws Exception { List.of(new CompoundProcessor(onFailureProcessor)) ); IngestService ingestService = 
createWithProcessors(Map.of("mock", (factories, tag, description, config) -> compoundProcessor)); - PutPipelineRequest putRequest = new PutPipelineRequest( - "_id", - new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest = putJsonPipelineRequest("_id", "{\"processors\": [{\"mock\" : {}}]}"); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); @@ -1536,11 +1497,7 @@ public void testExecuteFailureWithNestedOnFailure() throws Exception { List.of(new CompoundProcessor(false, processors, onFailureProcessors)) ); IngestService ingestService = createWithProcessors(Map.of("mock", (factories, tag, description, config) -> compoundProcessor)); - PutPipelineRequest putRequest = new PutPipelineRequest( - "_id", - new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest = putJsonPipelineRequest("_id", "{\"processors\": [{\"mock\" : {}}]}"); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); @@ -1608,11 +1565,7 @@ public void testBulkRequestExecutionWithFailures() throws Exception { return null; }).when(processor).execute(any(), any()); IngestService ingestService = createWithProcessors(Map.of("mock", (factories, tag, description, config) -> processor)); - PutPipelineRequest putRequest = new PutPipelineRequest( - "_id", - new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest = putJsonPipelineRequest("_id", "{\"processors\": [{\"mock\" : {}}]}"); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); @@ -1647,17 +1600,9 @@ public void testExecuteFailureRedirection() throws Exception { (factories, tag, description, config) -> new FakeProcessor("set", "", "", (ingestDocument) -> fail()) ) ); - PutPipelineRequest putRequest1 = new PutPipelineRequest( - "_id1", - new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest1 = putJsonPipelineRequest("_id1", "{\"processors\": [{\"mock\" : {}}]}"); // given that set -> fail() above, it's a failure if a document executes against this pipeline - PutPipelineRequest putRequest2 = new PutPipelineRequest( - "_id2", - new BytesArray("{\"processors\": [{\"set\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest2 = putJsonPipelineRequest("_id2", "{\"processors\": [{\"set\" : {}}]}"); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest1, clusterState); @@ -1707,11 +1652,7 @@ public void testExecuteFailureRedirectionWithNestedOnFailure() throws Exception List.of(new CompoundProcessor(false, processors, onFailureProcessors)) ); IngestService ingestService = createWithProcessors(Map.of("mock", (factories, tag, description, config) -> compoundProcessor)); - PutPipelineRequest putRequest = new PutPipelineRequest( - "_id", - new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest = putJsonPipelineRequest("_id", 
"{\"processors\": [{\"mock\" : {}}]}"); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); @@ -1783,11 +1724,7 @@ public void testBulkRequestExecutionWithRedirectedFailures() throws Exception { return null; }).when(processor).execute(any(), any()); IngestService ingestService = createWithProcessors(Map.of("mock", (factories, tag, description, config) -> processor)); - PutPipelineRequest putRequest = new PutPipelineRequest( - "_id", - new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest = putJsonPipelineRequest("_id", "{\"processors\": [{\"mock\" : {}}]}"); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); @@ -1849,8 +1786,8 @@ public void testBulkRequestExecution() throws Exception { map.put("mock", (factories, tag, description, config) -> processor); IngestService ingestService = createWithProcessors(map); - PutPipelineRequest putRequest = new PutPipelineRequest("_id", new BytesArray(""" - {"processors": [{"mock": {}}], "description": "_description"}"""), XContentType.JSON); + PutPipelineRequest putRequest = putJsonPipelineRequest("_id", """ + {"processors": [{"mock": {}}], "description": "_description"}"""); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); @@ -1929,22 +1866,10 @@ public String execute() { } // put some pipelines, and now there are pipeline and processor stats, too - PutPipelineRequest putRequest1 = new PutPipelineRequest( - "_id1", - new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest1 = putJsonPipelineRequest("_id1", "{\"processors\": [{\"mock\" : {}}]}"); // n.b. 
this 'pipeline' processor will always run the '_id3' pipeline, see the mocking/plumbing above and below - PutPipelineRequest putRequest2 = new PutPipelineRequest( - "_id2", - new BytesArray("{\"processors\": [{\"pipeline\" : {}}]}"), - XContentType.JSON - ); - PutPipelineRequest putRequest3 = new PutPipelineRequest( - "_id3", - new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest2 = putJsonPipelineRequest("_id2", "{\"processors\": [{\"pipeline\" : {}}]}"); + PutPipelineRequest putRequest3 = putJsonPipelineRequest("_id3", "{\"processors\": [{\"mock\" : {}}]}"); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest1, clusterState); @@ -2035,16 +1960,12 @@ public void testStats() throws Exception { assertThat(initialStats.pipelineStats().size(), equalTo(0)); assertStats(initialStats.totalStats(), 0, 0, 0); - PutPipelineRequest putRequest = new PutPipelineRequest( - "_id1", - new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest = putJsonPipelineRequest("_id1", "{\"processors\": [{\"mock\" : {}}]}"); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); - putRequest = new PutPipelineRequest("_id2", new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), XContentType.JSON); + putRequest = putJsonPipelineRequest("_id2", "{\"processors\": [{\"mock\" : {}}]}"); previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); @@ -2109,11 +2030,7 @@ public void testStats() throws Exception { assertProcessorStats(0, afterSecondRequestStats, "_id2", 1, 0, 0); // update cluster state and ensure that new stats are added to old stats - putRequest = new PutPipelineRequest( - "_id1", - new BytesArray("{\"processors\": [{\"mock\" : {}}, {\"mock\" : {}}]}"), - XContentType.JSON - ); + putRequest = putJsonPipelineRequest("_id1", "{\"processors\": [{\"mock\" : {}}, {\"mock\" : {}}]}"); previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); @@ -2146,8 +2063,8 @@ public void testStats() throws Exception { assertProcessorStats(0, afterThirdRequestStats, "_id2", 1, 0, 0); // test a failure, and that the processor stats are added from the old stats - putRequest = new PutPipelineRequest("_id1", new BytesArray(""" - {"processors": [{"failure-mock" : { "on_failure": [{"mock" : {}}]}}, {"mock" : {}}]}"""), XContentType.JSON); + putRequest = putJsonPipelineRequest("_id1", """ + {"processors": [{"failure-mock" : { "on_failure": [{"mock" : {}}]}}, {"mock" : {}}]}"""); previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); @@ -2177,7 +2094,7 @@ public void testStats() throws Exception { assertProcessorStats(0, afterForthRequestStats, "_id2", 1, 0, 0); // test with drop processor - putRequest = new PutPipelineRequest("_id3", new BytesArray("{\"processors\": 
[{\"drop\" : {}}]}"), XContentType.JSON); + putRequest = putJsonPipelineRequest("_id3", "{\"processors\": [{\"drop\" : {}}]}"); previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); @@ -2255,11 +2172,7 @@ public String getDescription() { } }); IngestService ingestService = createWithProcessors(factories); - PutPipelineRequest putRequest = new PutPipelineRequest( - "_id", - new BytesArray("{\"processors\": [{\"drop\" : {}}, {\"mock\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest = putJsonPipelineRequest("_id", "{\"processors\": [{\"drop\" : {}}, {\"mock\" : {}}]}"); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); @@ -2330,11 +2243,7 @@ public Map getProcessors(Processor.Parameters paramet ingestService.addIngestClusterStateListener(ingestClusterStateListener); // Create pipeline and apply the resulting cluster state, which should update the counter in the right order: - PutPipelineRequest putRequest = new PutPipelineRequest( - "_id", - new BytesArray("{\"processors\": [{\"test\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest = putJsonPipelineRequest("_id", "{\"processors\": [{\"test\" : {}}]}"); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest, clusterState); @@ -2353,11 +2262,7 @@ public void testCBORParsing() throws Exception { ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); ClusterState previousClusterState = clusterState; - PutPipelineRequest putRequest = new PutPipelineRequest( - "_id", - new BytesArray("{\"processors\": [{\"foo\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest = putJsonPipelineRequest("_id", "{\"processors\": [{\"foo\" : {}}]}"); clusterState = executePut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); assertThat(ingestService.getPipeline("_id"), notNullValue()); @@ -2403,16 +2308,8 @@ public void testSetsRawTimestamp() { ) ); - PutPipelineRequest putRequest1 = new PutPipelineRequest( - "_id1", - new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), - XContentType.JSON - ); - PutPipelineRequest putRequest2 = new PutPipelineRequest( - "_id2", - new BytesArray("{\"processors\": [{\"set\" : {}}]}"), - XContentType.JSON - ); + PutPipelineRequest putRequest1 = putJsonPipelineRequest("_id1", "{\"processors\": [{\"mock\" : {}}]}"); + PutPipelineRequest putRequest2 = putJsonPipelineRequest("_id2", "{\"processors\": [{\"set\" : {}}]}"); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty ClusterState previousClusterState = clusterState; clusterState = executePut(putRequest1, clusterState); @@ -2724,7 +2621,7 @@ public long getExecutionCount() { } }; - var request = new PutPipelineRequest(pipelineId, new BytesArray(pipelineString), XContentType.JSON); + var request = putJsonPipelineRequest(pipelineId, pipelineString); ingestService.putPipeline(request, listener, consumer); latch.await(); diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/IngestPipelineTestUtils.java 
b/test/framework/src/main/java/org/elasticsearch/ingest/IngestPipelineTestUtils.java new file mode 100644 index 0000000000000..1acd5b7d637cf --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/ingest/IngestPipelineTestUtils.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ingest.DeletePipelineRequest; +import org.elasticsearch.action.ingest.DeletePipelineTransportAction; +import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.PutPipelineTransportAction; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.internal.ElasticsearchClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.ToXContentFragment; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; + +import static org.elasticsearch.test.ESTestCase.safeGet; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; + +/** + * Utils for creating/retrieving/deleting ingest pipelines in a test cluster. + */ +public class IngestPipelineTestUtils { + private static final Logger logger = LogManager.getLogger(IngestPipelineTestUtils.class); + + private IngestPipelineTestUtils() { /* no instances */ } + + /** + * @param id The pipeline id. + * @param source The body of the {@link PutPipelineRequest} as a JSON-formatted {@link BytesReference}. + * @return a new {@link PutPipelineRequest} with the given {@code id} and body. + */ + public static PutPipelineRequest putJsonPipelineRequest(String id, BytesReference source) { + return new PutPipelineRequest(id, source, XContentType.JSON); + } + + /** + * @param id The pipeline id. + * @param jsonString The body of the {@link PutPipelineRequest} as a JSON-formatted {@link String}. + * @return a new {@link PutPipelineRequest} with the given {@code id} and body. + */ + public static PutPipelineRequest putJsonPipelineRequest(String id, String jsonString) { + return putJsonPipelineRequest(id, new BytesArray(jsonString)); + } + + /** + * Create an ingest pipeline with the given ID and body, using the given {@link ElasticsearchClient}. + * + * @param client The client to use to execute the {@link PutPipelineTransportAction}. + * @param id The pipeline id. + * @param source The body of the {@link PutPipelineRequest} as a JSON-formatted {@link BytesReference}. + */ + public static void putJsonPipeline(ElasticsearchClient client, String id, BytesReference source) { + assertAcked(safeGet(client.execute(PutPipelineTransportAction.TYPE, putJsonPipelineRequest(id, source)))); + } + + /** + * Create an ingest pipeline with the given ID and body, using the given {@link ElasticsearchClient}. 
+ * + * @param client The client to use to execute the {@link PutPipelineTransportAction}. + * @param id The pipeline id. + * @param jsonString The body of the {@link PutPipelineRequest} as a JSON-formatted {@link String}. + */ + public static void putJsonPipeline(ElasticsearchClient client, String id, String jsonString) { + putJsonPipeline(client, id, new BytesArray(jsonString)); + } + + /** + * Create an ingest pipeline with the given ID and body, using the given {@link ElasticsearchClient}. + * + * @param client The client to use to execute the {@link PutPipelineTransportAction}. + * @param id The pipeline id. + * @param toXContent The body of the {@link PutPipelineRequest} as a {@link ToXContentFragment}. + */ + public static void putJsonPipeline(ElasticsearchClient client, String id, ToXContentFragment toXContent) throws IOException { + try (var xContentBuilder = jsonBuilder()) { + xContentBuilder.startObject(); + toXContent.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); + xContentBuilder.endObject(); + putJsonPipeline(client, id, BytesReference.bytes(xContentBuilder)); + } + } + + /** + * Attempt to delete the ingest pipeline with the given {@code id}, using the given {@link ElasticsearchClient}, and logging (but + * otherwise ignoring) the result. + */ + public static void deletePipelinesIgnoringExceptions(ElasticsearchClient client, Iterable ids) { + for (final var id : ids) { + ESTestCase.safeAwait( + l -> client.execute(DeletePipelineTransportAction.TYPE, new DeletePipelineRequest(id), new ActionListener<>() { + @Override + public void onResponse(AcknowledgedResponse acknowledgedResponse) { + logger.info("delete pipeline [{}] success [acknowledged={}]", id, acknowledgedResponse.isAcknowledged()); + l.onResponse(null); + } + + @Override + public void onFailure(Exception e) { + logger.warn(Strings.format("delete pipeline [%s] failure", id), e); + l.onResponse(null); + } + }) + ); + } + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 71628967bf266..b41819c83ffcc 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -48,6 +48,12 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.ingest.DeletePipelineRequest; +import org.elasticsearch.action.ingest.DeletePipelineTransportAction; +import org.elasticsearch.action.ingest.GetPipelineAction; +import org.elasticsearch.action.ingest.GetPipelineRequest; +import org.elasticsearch.action.ingest.GetPipelineResponse; +import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.search.ClearScrollResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; @@ -126,6 +132,7 @@ import org.elasticsearch.indices.IndicesRequestCache; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.store.IndicesStore; +import org.elasticsearch.ingest.IngestPipelineTestUtils; import org.elasticsearch.monitor.jvm.HotThreads; import org.elasticsearch.node.NodeMocksPlugin; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; @@ -150,6 +157,7 @@ import org.elasticsearch.transport.TransportService; import 
org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; @@ -2613,4 +2621,48 @@ private static long fullyAllocate(CircuitBreaker circuitBreaker) { } return totalAllocated; } + + /** + * Create an ingest pipeline with the given ID and body, using the default {@link ESIntegTestCase#client()}. + * + * @param id The pipeline id. + * @param source The body of the {@link PutPipelineRequest} as a JSON-formatted {@link BytesReference}. + */ + protected static void putJsonPipeline(String id, BytesReference source) { + IngestPipelineTestUtils.putJsonPipeline(client(), id, source); + } + + /** + * Create an ingest pipeline with the given ID and body, using the default {@link ESIntegTestCase#client()}. + * + * @param id The pipeline id. + * @param jsonString The body of the {@link PutPipelineRequest} as a JSON-formatted {@link String}. + */ + protected static void putJsonPipeline(String id, String jsonString) { + IngestPipelineTestUtils.putJsonPipeline(client(), id, jsonString); + } + + /** + * Create an ingest pipeline with the given ID and body, using the default {@link ESIntegTestCase#client()}. + * + * @param id The pipeline id. + * @param toXContent The body of the {@link PutPipelineRequest} as a {@link ToXContentFragment}. + */ + protected static void putJsonPipeline(String id, ToXContentFragment toXContent) throws IOException { + IngestPipelineTestUtils.putJsonPipeline(client(), id, toXContent); + } + + /** + * @return the result of running the {@link GetPipelineAction} on the given IDs, using the default {@link ESIntegTestCase#client()}. + */ + protected static GetPipelineResponse getPipelines(String... ids) { + return safeGet(client().execute(GetPipelineAction.INSTANCE, new GetPipelineRequest(ids))); + } + + /** + * Delete the ingest pipeline with the given {@code id}, using the default {@link ESIntegTestCase#client()}.
+ */ + protected static void deletePipeline(String id) { + assertAcked(safeGet(client().execute(DeletePipelineTransportAction.TYPE, new DeletePipelineRequest(id)))); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 6eb8e0474225a..fe9fe6395271b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -20,6 +20,9 @@ import org.elasticsearch.action.admin.indices.template.delete.TransportDeleteComposableIndexTemplateAction; import org.elasticsearch.action.datastreams.DeleteDataStreamAction; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.ingest.DeletePipelineRequest; +import org.elasticsearch.action.ingest.DeletePipelineTransportAction; +import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.AdminClient; @@ -45,6 +48,7 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.indices.cluster.IndicesClusterStateService; +import org.elasticsearch.ingest.IngestPipelineTestUtils; import org.elasticsearch.node.MockNode; import org.elasticsearch.node.Node; import org.elasticsearch.node.NodeValidationException; @@ -56,6 +60,7 @@ import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.transport.TransportSettings; import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -493,4 +498,31 @@ protected void awaitIndexShardCloseAsyncTasks() { getInstanceFromNode(IndicesClusterStateService.class).onClusterStateShardsClosed(latch::countDown); safeAwait(latch); } + + /** + * Create an ingest pipeline with the given ID and body, using the default {@link ESSingleNodeTestCase#client()}. + * + * @param id The pipeline id. + * @param jsonString The body of the {@link PutPipelineRequest} as a JSON-formatted {@link String}. + */ + protected final void putJsonPipeline(String id, String jsonString) { + IngestPipelineTestUtils.putJsonPipeline(client(), id, jsonString); + } + + /** + * Create an ingest pipeline with the given ID and body, using the default {@link ESSingleNodeTestCase#client()}. + * + * @param id The pipeline id. + * @param toXContent The body of the {@link PutPipelineRequest} as a {@link ToXContentFragment}. + */ + protected final void putJsonPipeline(String id, ToXContentFragment toXContent) throws IOException { + IngestPipelineTestUtils.putJsonPipeline(client(), id, toXContent); + } + + /** + * Delete the ingest pipeline with the given {@code id}, using the default {@link ESSingleNodeTestCase#client()}.
+ */ + protected final void deletePipeline(String id) { + assertAcked(safeGet(client().execute(DeletePipelineTransportAction.TYPE, new DeletePipelineRequest(id)))); + } } diff --git a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java index 24669b694a33b..c7eb24ba5f627 100644 --- a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java +++ b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java @@ -18,9 +18,7 @@ import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.Strings; @@ -29,7 +27,6 @@ import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction; @@ -352,11 +349,9 @@ private static void createPipeline() { } private static void createPipeline(String policyName, String pipelineName) { - String pipelineBody = Strings.format(""" + putJsonPipeline(pipelineName, Strings.format(""" { "processors": [ { "enrich": { "policy_name": "%s", "field": "%s", "target_field": "user" } } ] - }""", policyName, MATCH_FIELD); - PutPipelineRequest request = new PutPipelineRequest(pipelineName, new BytesArray(pipelineBody), XContentType.JSON); - clusterAdmin().putPipeline(request).actionGet(); + }""", policyName, MATCH_FIELD)); } } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyReindexPipeline.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyReindexPipeline.java index 5b7020c3f2bb0..8d9da1ba631f6 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyReindexPipeline.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyReindexPipeline.java @@ -9,6 +9,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.PutPipelineTransportAction; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; @@ -67,8 +68,7 @@ static boolean exists(ClusterState clusterState) { */ public static void create(Client client, ActionListener listener) { final BytesReference pipeline = BytesReference.bytes(currentEnrichPipelineDefinition(XContentType.JSON)); - final PutPipelineRequest request = new PutPipelineRequest(pipelineName(), pipeline, XContentType.JSON); - client.admin().cluster().putPipeline(request, listener); + client.execute(PutPipelineTransportAction.TYPE, new PutPipelineRequest(pipelineName(), pipeline, XContentType.JSON), listener); } private static XContentBuilder currentEnrichPipelineDefinition(XContentType 
xContentType) { diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java index d17728fdd8037..afb5da07268cb 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/BasicEnrichTests.java @@ -16,9 +16,7 @@ import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; import org.elasticsearch.ingest.common.IngestCommonPlugin; @@ -26,7 +24,6 @@ import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.script.mustache.MustachePlugin; import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.core.enrich.action.EnrichStatsAction; @@ -99,7 +96,7 @@ public void testIngestDataWithMatchProcessor() { .actionGet(); String pipelineName = "my-pipeline"; - String pipelineBody = Strings.format(""" + putJsonPipeline(pipelineName, Strings.format(""" { "processors": [ { @@ -111,9 +108,7 @@ public void testIngestDataWithMatchProcessor() { } } ] - }""", policyName, MATCH_FIELD, maxMatches); - PutPipelineRequest putPipelineRequest = new PutPipelineRequest(pipelineName, new BytesArray(pipelineBody), XContentType.JSON); - clusterAdmin().putPipeline(putPipelineRequest).actionGet(); + }""", policyName, MATCH_FIELD, maxMatches)); BulkRequest bulkRequest = new BulkRequest("my-index"); for (int i = 0; i < numDocs; i++) { @@ -195,7 +190,7 @@ public void testIngestDataWithGeoMatchProcessor() { .actionGet(); String pipelineName = "my-pipeline"; - String pipelineBody = Strings.format(""" + putJsonPipeline(pipelineName, Strings.format(""" { "processors": [ { @@ -207,9 +202,7 @@ public void testIngestDataWithGeoMatchProcessor() { } } ] - }""", policyName, matchField); - PutPipelineRequest putPipelineRequest = new PutPipelineRequest(pipelineName, new BytesArray(pipelineBody), XContentType.JSON); - clusterAdmin().putPipeline(putPipelineRequest).actionGet(); + }""", policyName, matchField)); BulkRequest bulkRequest = new BulkRequest("my-index"); IndexRequest indexRequest = new IndexRequest(); @@ -258,12 +251,10 @@ public void testMultiplePolicies() { .actionGet(); String pipelineName = "pipeline" + i; - String pipelineBody = Strings.format(""" + putJsonPipeline(pipelineName, Strings.format(""" { "processors": [ { "enrich": { "policy_name": "%s", "field": "key", "target_field": "target" } } ] - }""", policyName); - PutPipelineRequest putPipelineRequest = new PutPipelineRequest(pipelineName, new BytesArray(pipelineBody), XContentType.JSON); - clusterAdmin().putPipeline(putPipelineRequest).actionGet(); + }""", policyName)); } BulkRequest bulkRequest = new BulkRequest("my-index"); @@ -316,12 +307,10 @@ public void testAsyncTaskExecute() throws Exception { }); String pipelineName = "test-pipeline"; - String pipelineBody = Strings.format(""" + putJsonPipeline(pipelineName, Strings.format(""" { "processors": [ { "enrich": { "policy_name": "%s", "field": 
"key", "target_field": "target" } } ] - }""", policyName); - PutPipelineRequest putPipelineRequest = new PutPipelineRequest(pipelineName, new BytesArray(pipelineBody), XContentType.JSON); - clusterAdmin().putPipeline(putPipelineRequest).actionGet(); + }""", policyName)); BulkRequest bulkRequest = new BulkRequest("my-index"); int numTestDocs = randomIntBetween(3, 10); @@ -359,13 +348,14 @@ public void testTemplating() throws Exception { .actionGet(); String pipelineName = "my-pipeline"; - String pipelineBody = Strings.format( - """ - {"processors": [{"enrich": {"policy_name":"%s", "field": "{{indirection1}}", "target_field": "{{indirection2}}"}}]}""", - policyName + putJsonPipeline( + pipelineName, + Strings.format( + """ + {"processors": [{"enrich": {"policy_name":"%s", "field": "{{indirection1}}", "target_field": "{{indirection2}}"}}]}""", + policyName + ) ); - PutPipelineRequest putPipelineRequest = new PutPipelineRequest(pipelineName, new BytesArray(pipelineBody), XContentType.JSON); - clusterAdmin().putPipeline(putPipelineRequest).actionGet(); IndexRequest indexRequest = new IndexRequest("my-index").id("1") .setPipeline(pipelineName) @@ -404,8 +394,7 @@ public void testFailureAfterEnrich() throws Exception { + "\", \"field\": \"email\", \"target_field\": \"users\"}}," + "{ \"foreach\": {\"field\":\"users\", \"processor\":{\"append\":{\"field\":\"matched2\",\"value\":\"{{_ingest._value}}\"}}}}" + "]}"; - PutPipelineRequest putPipelineRequest = new PutPipelineRequest(pipelineName, new BytesArray(pipelineBody), XContentType.JSON); - clusterAdmin().putPipeline(putPipelineRequest).actionGet(); + putJsonPipeline(pipelineName, pipelineBody); for (int i = 0; i < 5; i++) { IndexRequest indexRequest = new IndexRequest("my-index").id("1") diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyUpdateTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyUpdateTests.java index b015e97909179..efbe39b244de3 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyUpdateTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyUpdateTests.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.enrich; import org.elasticsearch.ResourceAlreadyExistsException; -import org.elasticsearch.action.ingest.PutPipelineRequest; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.ingest.IngestService; import org.elasticsearch.ingest.Pipeline; @@ -16,7 +14,6 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; @@ -63,10 +60,8 @@ public void testUpdatePolicyOnly() { equalTo(true) ); - String pipelineConfig = """ - {"processors":[{"enrich": {"policy_name": "my_policy", "field": "key", "target_field": "target"}}]}"""; - PutPipelineRequest putPipelineRequest = new PutPipelineRequest("1", new BytesArray(pipelineConfig), XContentType.JSON); - assertAcked(clusterAdmin().putPipeline(putPipelineRequest).actionGet()); + putJsonPipeline("1", """ + {"processors":[{"enrich": {"policy_name": "my_policy", "field": "key", "target_field": "target"}}]}"""); Pipeline pipelineInstance1 = 
ingestService.getPipeline("1"); assertThat(pipelineInstance1.getProcessors().get(0), instanceOf(MatchProcessor.class)); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichResiliencyTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichResiliencyTests.java index 3a2bfd87cff14..881c3f9245d26 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichResiliencyTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichResiliencyTests.java @@ -12,10 +12,7 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.ingest.PutPipelineRequest; -import org.elasticsearch.action.ingest.PutPipelineTransportAction; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.ingest.common.IngestCommonPlugin; @@ -23,7 +20,6 @@ import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; @@ -91,9 +87,7 @@ public void testWriteThreadLivenessBackToBack() throws Exception { new ExecuteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, enrichPolicyName).setWaitForCompletion(true) ).actionGet(); - XContentBuilder pipe1 = JsonXContent.contentBuilder(); - pipe1.startObject(); - { + putJsonPipeline(enrichPipelineName, (pipe1, params) -> { pipe1.startArray("processors"); { pipe1.startObject(); @@ -119,14 +113,8 @@ public void testWriteThreadLivenessBackToBack() throws Exception { } pipe1.endObject(); } - pipe1.endArray(); - } - pipe1.endObject(); - - client().execute( - PutPipelineTransportAction.TYPE, - new PutPipelineRequest(enrichPipelineName, BytesReference.bytes(pipe1), XContentType.JSON) - ).actionGet(); + return pipe1.endArray(); + }); client().admin().indices().create(new CreateIndexRequest(enrichedIndexName)).actionGet(); @@ -191,9 +179,7 @@ public void testWriteThreadLivenessWithPipeline() throws Exception { new ExecuteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, enrichPolicyName).setWaitForCompletion(true) ).actionGet(); - XContentBuilder pipe1 = JsonXContent.contentBuilder(); - pipe1.startObject(); - { + putJsonPipeline(enrichPipelineName1, (pipe1, params) -> { pipe1.startArray("processors"); { pipe1.startObject(); @@ -217,13 +203,10 @@ public void testWriteThreadLivenessWithPipeline() throws Exception { } pipe1.endObject(); } - pipe1.endArray(); - } - pipe1.endObject(); + return pipe1.endArray(); + }); - XContentBuilder pipe2 = JsonXContent.contentBuilder(); - pipe2.startObject(); - { + putJsonPipeline(enrichPipelineName2, (pipe2, params) -> { pipe2.startArray("processors"); { pipe2.startObject(); @@ -238,19 +221,8 @@ public void testWriteThreadLivenessWithPipeline() throws Exception { } pipe2.endObject(); } - pipe2.endArray(); - } - pipe2.endObject(); - - client().execute( - PutPipelineTransportAction.TYPE, - new PutPipelineRequest(enrichPipelineName1, BytesReference.bytes(pipe1), XContentType.JSON) - ).actionGet(); - - client().execute( - PutPipelineTransportAction.TYPE, - new 
PutPipelineRequest(enrichPipelineName2, BytesReference.bytes(pipe2), XContentType.JSON) - ).actionGet(); + return pipe2.endArray(); + }); client().admin().indices().create(new CreateIndexRequest(enrichedIndexName)).actionGet(); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureResetIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureResetIT.java index da86bcf01b406..d179a28aa9890 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureResetIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureResetIT.java @@ -8,12 +8,9 @@ import org.elasticsearch.action.admin.cluster.snapshots.features.ResetFeatureStateAction; import org.elasticsearch.action.admin.cluster.snapshots.features.ResetFeatureStateRequest; -import org.elasticsearch.action.ingest.DeletePipelineRequest; -import org.elasticsearch.action.ingest.DeletePipelineTransportAction; -import org.elasticsearch.action.ingest.PutPipelineRequest; -import org.elasticsearch.action.ingest.PutPipelineTransportAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.ingest.IngestPipelineTestUtils; import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.MlMetadata; @@ -112,13 +109,7 @@ protected void cleanUpResources() { @After public void cleanup() throws Exception { cleanUp(); - for (String pipeline : createdPipelines) { - try { - client().execute(DeletePipelineTransportAction.TYPE, new DeletePipelineRequest(pipeline)).actionGet(); - } catch (Exception ex) { - logger.warn(() -> "error cleaning up pipeline [" + pipeline + "]", ex); - } - } + IngestPipelineTestUtils.deletePipelinesIgnoringExceptions(client(), createdPipelines); } public void testMLFeatureReset() throws Exception { @@ -130,7 +121,7 @@ public void testMLFeatureReset() throws Exception { for (int i = 0; i < 100; i++) { indexDocForInference("feature_reset_inference_pipeline"); } - client().execute(DeletePipelineTransportAction.TYPE, new DeletePipelineRequest("feature_reset_inference_pipeline")).actionGet(); + deletePipeline("feature_reset_inference_pipeline"); createdPipelines.remove("feature_reset_inference_pipeline"); assertBusy( @@ -160,8 +151,7 @@ public void testMLFeatureResetFailureDueToPipelines() throws Exception { "Unable to reset machine learning feature as there are ingest pipelines still referencing trained machine learning models" ) ); - client().execute(DeletePipelineTransportAction.TYPE, new DeletePipelineRequest("feature_reset_failure_inference_pipeline")) - .actionGet(); + deletePipeline("feature_reset_failure_inference_pipeline"); createdPipelines.remove("feature_reset_failure_inference_pipeline"); assertThat(isResetMode(), is(false)); } @@ -294,8 +284,8 @@ private void startRealtime(String jobId) throws Exception { }, 30, TimeUnit.SECONDS); } - private void putTrainedModelIngestPipeline(String pipelineId) throws Exception { - client().execute(PutPipelineTransportAction.TYPE, new PutPipelineRequest(pipelineId, new BytesArray(""" + private void putTrainedModelIngestPipeline(String pipelineId) { + putJsonPipeline(pipelineId, """ { "processors": [ { @@ -306,7 +296,7 @@ private void putTrainedModelIngestPipeline(String pipelineId) 
throws Exception { } } ] - }"""), XContentType.JSON)).actionGet(); + }"""); } private void indexDocForInference(String pipelineId) { diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/license/MachineLearningLicensingIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/license/MachineLearningLicensingIT.java index 98ad515680734..08d09f70cb46b 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/license/MachineLearningLicensingIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/license/MachineLearningLicensingIT.java @@ -9,8 +9,6 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.ingest.PutPipelineRequest; -import org.elasticsearch.action.ingest.PutPipelineTransportAction; import org.elasticsearch.action.ingest.SimulateDocumentBaseResult; import org.elasticsearch.action.ingest.SimulatePipelineAction; import org.elasticsearch.action.ingest.SimulatePipelineRequest; @@ -527,18 +525,7 @@ public void testMachineLearningCreateInferenceProcessorRestricted() { }]} """; // Creating a pipeline should work - PlainActionFuture putPipelineListener = new PlainActionFuture<>(); - client().execute( - PutPipelineTransportAction.TYPE, - new PutPipelineRequest( - "test_infer_license_pipeline", - new BytesArray(pipeline.getBytes(StandardCharsets.UTF_8)), - XContentType.JSON - ), - putPipelineListener - ); - AcknowledgedResponse putPipelineResponse = putPipelineListener.actionGet(); - assertTrue(putPipelineResponse.isAcknowledged()); + putJsonPipeline("test_infer_license_pipeline", pipeline); prepareIndex("infer_license_test").setPipeline("test_infer_license_pipeline").setSource("{}", XContentType.JSON).get(); @@ -575,18 +562,7 @@ public void testMachineLearningCreateInferenceProcessorRestricted() { } // Creating a new pipeline with an inference processor should work - putPipelineListener = new PlainActionFuture<>(); - client().execute( - PutPipelineTransportAction.TYPE, - new PutPipelineRequest( - "test_infer_license_pipeline_again", - new BytesArray(pipeline.getBytes(StandardCharsets.UTF_8)), - XContentType.JSON - ), - putPipelineListener - ); - putPipelineResponse = putPipelineListener.actionGet(); - assertTrue(putPipelineResponse.isAcknowledged()); + putJsonPipeline("test_infer_license_pipeline_again", pipeline); // Inference against the new pipeline should fail since it has never previously succeeded ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, () -> { @@ -609,18 +585,7 @@ public void testMachineLearningCreateInferenceProcessorRestricted() { enableLicensing(mode); assertMLAllowed(true); // test that license restricted apis do now work - PlainActionFuture putPipelineListenerNewLicense = new PlainActionFuture<>(); - client().execute( - PutPipelineTransportAction.TYPE, - new PutPipelineRequest( - "test_infer_license_pipeline", - new BytesArray(pipeline.getBytes(StandardCharsets.UTF_8)), - XContentType.JSON - ), - putPipelineListenerNewLicense - ); - AcknowledgedResponse putPipelineResponseNewLicense = putPipelineListenerNewLicense.actionGet(); - assertTrue(putPipelineResponseNewLicense.isAcknowledged()); + putJsonPipeline("test_infer_license_pipeline", pipeline); PlainActionFuture simulatePipelineListenerNewLicense = new PlainActionFuture<>(); client().execute( diff --git 
a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureLicenseTrackingIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureLicenseTrackingIT.java index 936e499e94feb..ce270c570c8cd 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureLicenseTrackingIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureLicenseTrackingIT.java @@ -7,16 +7,10 @@ package org.elasticsearch.xpack.ml.integration; -import org.elasticsearch.action.ingest.DeletePipelineRequest; -import org.elasticsearch.action.ingest.DeletePipelineTransportAction; -import org.elasticsearch.action.ingest.PutPipelineRequest; -import org.elasticsearch.action.ingest.PutPipelineTransportAction; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.core.Strings; +import org.elasticsearch.ingest.IngestPipelineTestUtils; import org.elasticsearch.license.GetFeatureUsageRequest; import org.elasticsearch.license.GetFeatureUsageResponse; import org.elasticsearch.license.TransportGetFeatureUsageAction; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.action.CloseJobAction; import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction; import org.elasticsearch.xpack.core.ml.action.OpenJobAction; @@ -42,6 +36,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.core.ml.MachineLearningField.ML_FEATURE_FAMILY; import static org.elasticsearch.xpack.ml.inference.loadingservice.LocalModelTests.buildClassification; import static org.elasticsearch.xpack.ml.integration.ModelInferenceActionIT.buildTrainedModelConfigBuilder; @@ -59,13 +54,7 @@ public class TestFeatureLicenseTrackingIT extends MlSingleNodeTestCase { @After public void cleanup() throws Exception { - for (String pipeline : createdPipelines) { - try { - client().execute(DeletePipelineTransportAction.TYPE, new DeletePipelineRequest(pipeline)).actionGet(); - } catch (Exception ex) { - logger.warn(() -> "error cleaning up pipeline [" + pipeline + "]", ex); - } - } + IngestPipelineTestUtils.deletePipelinesIgnoringExceptions(client(), createdPipelines); // Some of the tests have async side effects. We need to wait for these to complete before continuing // the cleanup, otherwise unexpected indices may get created during the cleanup process. 
BaseMlIntegTestCase.waitForPendingTasks(client()); @@ -170,7 +159,7 @@ public void testFeatureTrackingInferenceModelPipeline() throws Exception { assertThat(lastUsage.toInstant(), lessThan(recentUsage.toInstant())); }); - client().execute(DeletePipelineTransportAction.TYPE, new DeletePipelineRequest(pipelineId)).actionGet(); + deletePipeline(pipelineId); createdPipelines.remove(pipelineId); // Make sure that feature usage keeps the last usage once the model is removed @@ -210,8 +199,8 @@ private List getJobStats(String jobId) { return response.getResponse().results(); } - private void putTrainedModelIngestPipeline(String pipelineId, String modelId) throws Exception { - client().execute(PutPipelineTransportAction.TYPE, new PutPipelineRequest(pipelineId, new BytesArray(Strings.format(""" + private void putTrainedModelIngestPipeline(String pipelineId, String modelId) { + putJsonPipeline(pipelineId, format(""" { "processors": [ { @@ -222,7 +211,7 @@ private void putTrainedModelIngestPipeline(String pipelineId, String modelId) th } } ] - }""", modelId)), XContentType.JSON)).actionGet(); + }""", modelId)); } } From 6ef94ac1606543c2fd8abf8b809907d56493c931 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Thu, 12 Sep 2024 09:49:08 +0200 Subject: [PATCH 50/58] Deduplicate BucketOrder when deserializing (#112707) Deduplicate BucketOrder objects by wrapping the StreamInput generated by DelayableWriteable objects. --- docs/changelog/112707.yaml | 5 ++ .../common/io/stream/DelayableWriteable.java | 67 ++++++++++++++++++- .../search/aggregations/InternalOrder.java | 14 +++- .../io/stream/DelayableWriteableTests.java | 20 ++++-- .../aggregations/InternalOrderTests.java | 32 +++++++++ 5 files changed, 129 insertions(+), 9 deletions(-) create mode 100644 docs/changelog/112707.yaml diff --git a/docs/changelog/112707.yaml b/docs/changelog/112707.yaml new file mode 100644 index 0000000000000..9f16cfcd2b6f2 --- /dev/null +++ b/docs/changelog/112707.yaml @@ -0,0 +1,5 @@ +pr: 112707 +summary: Deduplicate `BucketOrder` when deserializing +area: Aggregations +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/DelayableWriteable.java b/server/src/main/java/org/elasticsearch/common/io/stream/DelayableWriteable.java index 4b3683edf7307..8140649c9fb4c 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/DelayableWriteable.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/DelayableWriteable.java @@ -15,6 +15,8 @@ import java.io.IOException; import java.io.UncheckedIOException; +import java.util.HashMap; +import java.util.Map; /** * A holder for {@link Writeable}s that delays reading the underlying object @@ -230,11 +232,72 @@ private static <T extends Writeable> T deserialize( ) throws IOException { try ( StreamInput in = registry == null - ? serialized.streamInput() - : new NamedWriteableAwareStreamInput(serialized.streamInput(), registry) + ?
new DeduplicateStreamInput(serialized.streamInput(), new DeduplicatorCache()) + : new DeduplicateNamedWriteableAwareStreamInput(serialized.streamInput(), registry, new DeduplicatorCache()) ) { in.setTransportVersion(serializedAtVersion); return reader.read(in); } } + + /** An object implementing this interface can deduplicate instances of the provided objects.*/ + public interface Deduplicator { + <T> T deduplicate(T object); + } + + private static class DeduplicateStreamInput extends FilterStreamInput implements Deduplicator { + + private final Deduplicator deduplicator; + + private DeduplicateStreamInput(StreamInput delegate, Deduplicator deduplicator) { + super(delegate); + this.deduplicator = deduplicator; + } + + @Override + public <T> T deduplicate(T object) { + return deduplicator.deduplicate(object); + } + } + + private static class DeduplicateNamedWriteableAwareStreamInput extends NamedWriteableAwareStreamInput implements Deduplicator { + + private final Deduplicator deduplicator; + + private DeduplicateNamedWriteableAwareStreamInput( + StreamInput delegate, + NamedWriteableRegistry registry, + Deduplicator deduplicator + ) { + super(delegate, registry); + this.deduplicator = deduplicator; + } + + @Override + public <T> T deduplicate(T object) { + return deduplicator.deduplicate(object); + } + } + + /** + * Implementation of a {@link Deduplicator} cache. It can hold up to 1024 instances. + */ + private static class DeduplicatorCache implements Deduplicator { + + private static final int MAX_SIZE = 1024; + // lazily init + private Map<Object, Object> cache = null; + + @SuppressWarnings("unchecked") + @Override + public <T> T deduplicate(T object) { + if (cache == null) { + cache = new HashMap<>(); + cache.put(object, object); + } else if (cache.size() < MAX_SIZE) { + object = (T) cache.computeIfAbsent(object, o -> o); + } + return object; + } + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java index 482d915560d04..9dcaa691219ef 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalOrder.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.aggregations; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.io.stream.DelayableWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; @@ -36,6 +37,7 @@ */ public abstract class InternalOrder extends BucketOrder { // TODO merge the contents of this file into BucketOrder. The way it is now is relic. + /** * {@link Bucket} ordering strategy to sort by a sub-aggregation. */ @@ -476,6 +478,10 @@ public static class Streams { * @throws IOException on error reading from the stream.
*/ public static BucketOrder readOrder(StreamInput in) throws IOException { + return readOrder(in, true); + } + + private static BucketOrder readOrder(StreamInput in, boolean dedupe) throws IOException { byte id = in.readByte(); switch (id) { case COUNT_DESC_ID: @@ -489,12 +495,18 @@ public static BucketOrder readOrder(StreamInput in) throws IOException { case Aggregation.ID: boolean asc = in.readBoolean(); String key = in.readString(); + if (dedupe && in instanceof DelayableWriteable.Deduplicator bo) { + return bo.deduplicate(new Aggregation(key, asc)); + } return new Aggregation(key, asc); case CompoundOrder.ID: int size = in.readVInt(); List compoundOrder = new ArrayList<>(size); for (int i = 0; i < size; i++) { - compoundOrder.add(Streams.readOrder(in)); + compoundOrder.add(Streams.readOrder(in, false)); + } + if (dedupe && in instanceof DelayableWriteable.Deduplicator bo) { + return bo.deduplicate(new CompoundOrder(compoundOrder, false)); } return new CompoundOrder(compoundOrder, false); default: diff --git a/server/src/test/java/org/elasticsearch/common/io/stream/DelayableWriteableTests.java b/server/src/test/java/org/elasticsearch/common/io/stream/DelayableWriteableTests.java index 7e42653952a94..52541f1366236 100644 --- a/server/src/test/java/org/elasticsearch/common/io/stream/DelayableWriteableTests.java +++ b/server/src/test/java/org/elasticsearch/common/io/stream/DelayableWriteableTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.test.TransportVersionUtils; import java.io.IOException; +import java.util.Objects; import static java.util.Collections.singletonList; import static org.hamcrest.Matchers.equalTo; @@ -57,19 +58,23 @@ public int hashCode() { } private static class NamedHolder implements Writeable { - private final Example e; + private final Example e1; + private final Example e2; NamedHolder(Example e) { - this.e = e; + this.e1 = e; + this.e2 = e; } NamedHolder(StreamInput in) throws IOException { - e = in.readNamedWriteable(Example.class); + e1 = ((DelayableWriteable.Deduplicator) in).deduplicate(in.readNamedWriteable(Example.class)); + e2 = ((DelayableWriteable.Deduplicator) in).deduplicate(in.readNamedWriteable(Example.class)); } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeNamedWriteable(e); + out.writeNamedWriteable(e1); + out.writeNamedWriteable(e2); } @Override @@ -78,12 +83,12 @@ public boolean equals(Object obj) { return false; } NamedHolder other = (NamedHolder) obj; - return e.equals(other.e); + return e1.equals(other.e1) && e2.equals(other.e2); } @Override public int hashCode() { - return e.hashCode(); + return Objects.hash(e1, e2); } } @@ -130,6 +135,9 @@ public void testRoundTripFromDelayedWithNamedWriteable() throws IOException { DelayableWriteable original = DelayableWriteable.referencing(n).asSerialized(NamedHolder::new, writableRegistry()); assertTrue(original.isSerialized()); roundTripTestCase(original, NamedHolder::new); + NamedHolder copy = original.expand(); + // objects have been deduplicated + assertSame(copy.e1, copy.e2); } public void testRoundTripFromDelayedFromOldVersion() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/InternalOrderTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/InternalOrderTests.java index b6cdf1a2825d8..1d21a1fe173df 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/InternalOrderTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/InternalOrderTests.java @@ -7,6 +7,10 @@ */ 
package org.elasticsearch.search.aggregations; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.DelayableWriteable; +import org.elasticsearch.common.io.stream.FilterStreamInput; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.search.aggregations.InternalOrder.CompoundOrder; import org.elasticsearch.test.AbstractXContentSerializingTestCase; @@ -116,4 +120,32 @@ protected BucketOrder mutateInstance(BucketOrder instance) { } } + public void testInternalOrderDeduplicated() throws IOException { + BucketOrder testInstance = createTestInstance(); + try (BytesStreamOutput output = new BytesStreamOutput()) { + instanceWriter().write(output, testInstance); + if (testInstance instanceof CompoundOrder || testInstance instanceof InternalOrder.Aggregation) { + assertNotSame(testInstance, instanceReader().read(output.bytes().streamInput())); + } + StreamInput dedupe = new DeduplicatorStreamInput(output.bytes().streamInput(), testInstance); + assertSame(testInstance, instanceReader().read(dedupe)); + } + } + + private static class DeduplicatorStreamInput extends FilterStreamInput implements DelayableWriteable.Deduplicator { + + private final BucketOrder order; + + protected DeduplicatorStreamInput(StreamInput delegate, BucketOrder order) { + super(delegate); + this.order = order; + } + + @SuppressWarnings("unchecked") + @Override + public <T> T deduplicate(T object) { + return (T) order; + } + } + } From 3fadf5313990746ae33b2691515491f0dfd2423b Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Thu, 12 Sep 2024 09:55:23 +0100 Subject: [PATCH 51/58] Block use of current version feature in yaml tests (#112737) Using the current version as a condition is not compatible with Serverless, as the version doesn't change between commits. Cluster features should be used instead.
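To illustrate the recommended alternative, here is a minimal sketch of a cluster feature defined alongside a code change; the class name and feature id below are invented for illustration and are not part of this patch:

```java
// Hypothetical example: MyFeatures and "my_module.my_new_behavior" are placeholder
// names. A real implementation would also be registered as a FeatureSpecification
// service provider so the cluster advertises the feature.
import org.elasticsearch.features.FeatureSpecification;
import org.elasticsearch.features.NodeFeature;

import java.util.Set;

public class MyFeatures implements FeatureSpecification {
    // Tests gate on this id instead of a version check, which also works on
    // Serverless where the version never changes between commits.
    public static final NodeFeature MY_NEW_BEHAVIOR = new NodeFeature("my_module.my_new_behavior");

    @Override
    public Set<NodeFeature> getFeatures() {
        return Set.of(MY_NEW_BEHAVIOR);
    }
}
```

A YAML test would then require the feature via something along the lines of `requires: cluster_features: my_module.my_new_behavior` (with a reason) rather than a synthetic version feature.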
--- .../test/rest/ESRestTestFeatureService.java | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java index aa04855c03626..0988e7ec19c79 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java @@ -95,6 +95,27 @@ public boolean clusterHasFeature(String featureId, boolean any) { Matcher matcher = VERSION_FEATURE_PATTERN.matcher(featureId); if (matcher.matches()) { Version extractedVersion = Version.fromString(matcher.group(1)); + if (extractedVersion.after(Version.CURRENT)) { + throw new IllegalArgumentException( + Strings.format( + "Cannot use a synthetic feature [%s] for a version after the current version [%s]", + featureId, + Version.CURRENT + ) + ); + } + + if (extractedVersion.equals(Version.CURRENT)) { + throw new IllegalArgumentException( + Strings.format( + "Cannot use a synthetic feature [%s] for the current version [%s]; " + + "please define a test cluster feature alongside the corresponding code change instead", + featureId, + Version.CURRENT + ) + ); + } + return checkCollection(nodeVersions, v -> v.onOrAfter(extractedVersion), any); } From b801949621790eb58eb48d85f62579eacd9ab528 Mon Sep 17 00:00:00 2001 From: Victor Martinez Date: Thu, 12 Sep 2024 10:58:23 +0200 Subject: [PATCH 52/58] ci(bump automation): bump ubi9 for ironbank (#112298) ### What Enable updatecli [policies](https://www.updatecli.io/blog/shareable-policies) to bump the Ironbank versions automatically, so that PRs like https://github.com/elastic/elasticsearch/pull/111743 no longer need to be created manually: one is opened automatically whenever a new ubi9 version is released and available in the Ironbank system. Those policies can be found at [elastic/oblt-updatecli-policies@main/updatecli/policies/](https://github.com/elastic/oblt-updatecli-policies/tree/main/updatecli/policies/?rgh-link-date=2024-08-28T16%3A30%3A04Z) (NOTE: This is a private repository only accessible by Elastic employees) #### How to test this PR locally 1. `gh pr checkout 112298` 2. Install [updatecli](https://www.updatecli.io/docs/prologue/installation/) 3. Login to [ghcr.io](https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry#authenticating-to-the-container-registry) 4. Diff (dry-run) ```bash $ GITHUB_TOKEN=$(gh auth token) updatecli compose diff --experimental ``` 5.
Create Pull Request if new changes ```bash $ GITHUB_REPOSITORY=elastic/elasticsearch \ GITHUB_ACTOR=v1v \ GITHUB_TOKEN=$(gh auth token) \ updatecli compose apply --experimental ``` --- .github/updatecli/values.d/ironbank.yml | 3 ++ .github/updatecli/values.d/scm.yml | 10 +++++ .../updatecli/values.d/updatecli-compose.yml | 3 ++ .github/workflows/updatecli-compose.yml | 38 +++++++++++++++++++ updatecli-compose.yaml | 14 +++++++ 5 files changed, 68 insertions(+) create mode 100644 .github/updatecli/values.d/ironbank.yml create mode 100644 .github/updatecli/values.d/scm.yml create mode 100644 .github/updatecli/values.d/updatecli-compose.yml create mode 100644 .github/workflows/updatecli-compose.yml create mode 100644 updatecli-compose.yaml diff --git a/.github/updatecli/values.d/ironbank.yml b/.github/updatecli/values.d/ironbank.yml new file mode 100644 index 0000000000000..10b2c948a5837 --- /dev/null +++ b/.github/updatecli/values.d/ironbank.yml @@ -0,0 +1,3 @@ +config: + - path: distribution/docker/src/docker/iron_bank + dockerfile: ../Dockerfile diff --git a/.github/updatecli/values.d/scm.yml b/.github/updatecli/values.d/scm.yml new file mode 100644 index 0000000000000..ec4bd2d612091 --- /dev/null +++ b/.github/updatecli/values.d/scm.yml @@ -0,0 +1,10 @@ +scm: + enabled: true + owner: elastic + repository: elasticsearch + branch: main + commitusingapi: true + # begin updatecli-compose policy values + user: elasticmachine + email: 42973632+elasticmachine@users.noreply.github.com + # end updatecli-compose policy values diff --git a/.github/updatecli/values.d/updatecli-compose.yml b/.github/updatecli/values.d/updatecli-compose.yml new file mode 100644 index 0000000000000..02df609f2a30c --- /dev/null +++ b/.github/updatecli/values.d/updatecli-compose.yml @@ -0,0 +1,3 @@ +spec: + files: + - "updatecli-compose.yaml" \ No newline at end of file diff --git a/.github/workflows/updatecli-compose.yml b/.github/workflows/updatecli-compose.yml new file mode 100644 index 0000000000000..cbab42d3a63b1 --- /dev/null +++ b/.github/workflows/updatecli-compose.yml @@ -0,0 +1,38 @@ +--- +name: updatecli-compose + +on: + workflow_dispatch: + schedule: + - cron: '0 6 * * *' + +permissions: + contents: read + +jobs: + compose: + runs-on: ubuntu-latest + permissions: + contents: write + packages: read + pull-requests: write + steps: + - uses: actions/checkout@v4 + + - uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446 # v3.2.0 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - uses: elastic/oblt-actions/updatecli/run@v1 + with: + command: --experimental compose diff + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - uses: elastic/oblt-actions/updatecli/run@v1 + with: + command: --experimental compose apply + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/updatecli-compose.yaml b/updatecli-compose.yaml new file mode 100644 index 0000000000000..8ad9bd6df8afb --- /dev/null +++ b/updatecli-compose.yaml @@ -0,0 +1,14 @@ +# Config file for `updatecli compose ...`. 
+# https://www.updatecli.io/docs/core/compose/ +policies: + - name: Handle ironbank bumps + policy: ghcr.io/elastic/oblt-updatecli-policies/ironbank/templates:0.3.0@sha256:b0c841d8fb294e6b58359462afbc83070dca375ac5dd0c5216c8926872a98bb1 + values: + - .github/updatecli/values.d/scm.yml + - .github/updatecli/values.d/ironbank.yml + + - name: Update Updatecli policies + policy: ghcr.io/updatecli/policies/autodiscovery/updatecli:0.4.0@sha256:254367f5b1454fd6032b88b314450cd3b6d5e8d5b6c953eb242a6464105eb869 + values: + - .github/updatecli/values.d/scm.yml + - .github/updatecli/values.d/updatecli-compose.yml \ No newline at end of file From c3c4aa5820c44af676600f08a16c428daffe2f7d Mon Sep 17 00:00:00 2001 From: Matteo Piergiovanni <134913285+piergm@users.noreply.github.com> Date: Thu, 12 Sep 2024 11:14:53 +0200 Subject: [PATCH 53/58] Two empty mappings now are created equally (#107936) * Two empty mappings reported equally in field_caps * empty mapping now is {} * iter --- docs/changelog/107936.yaml | 6 ++ .../test/indices.get_mapping/10_basic.yml | 88 +++++++++---------- .../admin/indices/create/CreateIndexIT.java | 16 ++++ .../indices/create/CreateIndexRequest.java | 10 ++- .../rollover/MetadataRolloverService.java | 2 +- .../MetadataMigrateToDataStreamService.java | 2 +- .../xpack/enrich/EnrichPolicyRunner.java | 2 +- 7 files changed, 76 insertions(+), 50 deletions(-) create mode 100644 docs/changelog/107936.yaml diff --git a/docs/changelog/107936.yaml b/docs/changelog/107936.yaml new file mode 100644 index 0000000000000..89dd57f7a81a5 --- /dev/null +++ b/docs/changelog/107936.yaml @@ -0,0 +1,6 @@ +pr: 107936 +summary: Two empty mappings now are created equally +area: Mapping +type: bug +issues: + - 107031 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml index e46f67326a8d2..371a9961122a7 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml @@ -1,85 +1,85 @@ --- setup: - do: - indices.create: - index: test_1 - body: - mappings: {} + indices.create: + index: test_1 + body: + mappings: {} - do: - indices.create: - index: test_2 - body: - mappings: {} + indices.create: + index: test_2 + body: + mappings: {} --- "Get /{index}/_mapping with empty mappings": - - do: - indices.create: - index: t + - do: + indices.create: + index: t - - do: - indices.get_mapping: - index: t + - do: + indices.get_mapping: + index: t - - match: { t.mappings: {}} + - match: { t.mappings: {}} --- "Get /_mapping": - - do: - indices.get_mapping: {} + - do: + indices.get_mapping: {} - - is_true: test_1.mappings - - is_true: test_2.mappings + - is_true: test_1.mappings + - is_true: test_2.mappings --- "Get /{index}/_mapping": - - do: - indices.get_mapping: - index: test_1 + - do: + indices.get_mapping: + index: test_1 - - is_true: test_1.mappings - - is_false: test_2 + - is_true: test_1.mappings + - is_false: test_2 --- "Get /_all/_mapping": - - do: - indices.get_mapping: - index: _all + - do: + indices.get_mapping: + index: _all - - is_true: test_1.mappings - - is_true: test_2.mappings + - is_true: test_1.mappings + - is_true: test_2.mappings --- "Get /*/_mapping": - - do: - indices.get_mapping: - index: '*' + - do: + indices.get_mapping: + index: '*' - - is_true: test_1.mappings - - is_true: test_2.mappings + - is_true: 
test_1.mappings + - is_true: test_2.mappings --- "Get /index,index/_mapping": - - do: - indices.get_mapping: - index: test_1,test_2 + - do: + indices.get_mapping: + index: test_1,test_2 - - is_true: test_1.mappings - - is_true: test_2.mappings + - is_true: test_1.mappings + - is_true: test_2.mappings --- "Get /index*/_mapping/": - - do: - indices.get_mapping: - index: '*2' + - do: + indices.get_mapping: + index: '*2' - - is_true: test_2.mappings - - is_false: test_1 + - is_true: test_2.mappings + - is_false: test_1 diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java index 3723a415ab061..a835276451b12 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java @@ -16,6 +16,8 @@ import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.IndicesOptions; @@ -154,6 +156,20 @@ public void testEmptyMappings() throws Exception { assertTrue(mappings.sourceAsMap().isEmpty()); } + public void testTwoEmptyEqualMappings() throws Exception { + assertAcked(prepareCreate("test1")); + assertAcked(prepareCreate("test2").setMapping(XContentFactory.jsonBuilder().startObject().endObject())); + FieldCapabilitiesRequest fieldCapsReq1 = new FieldCapabilitiesRequest(); + fieldCapsReq1.indices("test1"); + fieldCapsReq1.fields("*"); + FieldCapabilitiesResponse fieldCapsResp1 = internalCluster().coordOnlyNodeClient().fieldCaps(fieldCapsReq1).actionGet(); + FieldCapabilitiesRequest fieldCapsReq2 = new FieldCapabilitiesRequest(); + fieldCapsReq2.indices("test2"); + fieldCapsReq2.fields("*"); + FieldCapabilitiesResponse fieldCapsResp2 = internalCluster().coordOnlyNodeClient().fieldCaps(fieldCapsReq2).actionGet(); + assertEquals(fieldCapsResp1.get(), fieldCapsResp2.get()); + } + public void testInvalidShardCountSettings() throws Exception { int value = randomIntBetween(-10, 0); try { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java index 5d1b7264ebf81..152cd6a34196d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java @@ -67,8 +67,9 @@ public class CreateIndexRequest extends AcknowledgedRequest private boolean initializeFailureStore; private Settings settings = Settings.EMPTY; + public static final String EMPTY_MAPPINGS = "{}"; - private String mappings = "{}"; + private String mappings = EMPTY_MAPPINGS; private final Set aliases = new HashSet<>(); @@ -284,8 +285,11 @@ private CreateIndexRequest mapping(BytesReference source, XContentType xContentT } private CreateIndexRequest mapping(String type, Map source) { - // wrap it in a type map if its not - if (source.size() != 1 
|| source.containsKey(type) == false) { + if (source.isEmpty()) { + // If no source is provided we return empty mappings + return mapping(EMPTY_MAPPINGS); + } else if (source.size() != 1 || source.containsKey(type) == false) { + // wrap it in a type map if it's not source = Map.of(MapperService.SINGLE_MAPPING_NAME, source); } else if (MapperService.SINGLE_MAPPING_NAME.equals(type) == false) { // if it has a different type name, then unwrap and rewrap with _doc diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java index b8d975f82980d..928948059df91 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java @@ -660,7 +660,7 @@ static void validate( } if ((request.settings().equals(Settings.EMPTY) == false) || (request.aliases().size() > 0) - || (request.mappings().equals("{}") == false)) { + || (request.mappings().equals(CreateIndexRequest.EMPTY_MAPPINGS) == false)) { throw new IllegalArgumentException( "aliases, mappings, and index settings may not be specified when rolling over a data stream" ); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMigrateToDataStreamService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMigrateToDataStreamService.java index 13c2fabd6b3df..7f0840ca8bb29 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMigrateToDataStreamService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMigrateToDataStreamService.java @@ -224,7 +224,7 @@ static void prepareBackingIndex( Settings nodeSettings ) throws IOException { MappingMetadata mm = im.mapping(); - if (mm == null) { + if (mm == null || mm.equals(MappingMetadata.EMPTY_MAPPINGS)) { throw new IllegalArgumentException("backing index [" + im.getIndex().getName() + "] must have mappings for a timestamp field"); } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java index 298a7c307efa5..a279eac5befc6 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java @@ -172,7 +172,7 @@ private static List<Map<String, Object>> toMappings(GetIndexResponse response) { private Map<String, Object> getMappings(final GetIndexResponse getIndexResponse, final String sourceIndexName) { Map<String, MappingMetadata> mappings = getIndexResponse.mappings(); MappingMetadata indexMapping = mappings.get(sourceIndexName); - if (indexMapping == MappingMetadata.EMPTY_MAPPINGS) { + if (MappingMetadata.EMPTY_MAPPINGS.equals(indexMapping)) { throw new ElasticsearchException( "Enrich policy execution for [{}] failed. No mapping available on source [{}] included in [{}]", policyName, From af1ba757d01cd492f1d3e81cc33540f11e107fa4 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 12 Sep 2024 10:58:20 +0100 Subject: [PATCH 54/58] Cleanup shutdown module bwc in v9 (#112793) Removes a bunch of now-unnecessary bwc code in the shutdown module.
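For context, the shape of the code being removed is the classic wire-compatibility shim: a static reader dispatches on the sender's transport version and substitutes 30-second default timeouts for legacy peers that never sent them. The following self-contained toy (plain java.io and invented names, not the real StreamInput API) sketches that pattern; dropping support for the old version collapses readFrom into the single plain constructor path:

```java
// Toy illustration only: RequestWire stands in for the version-gated readFrom
// shims deleted by this commit; none of these names are Elasticsearch APIs.
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class RequestWire {
    record Request(long masterTimeoutMillis, long ackTimeoutMillis, String nodeId) {}

    static final int VERSION_WITH_TIMEOUTS = 2; // invented wire-version constant
    static final long THIRTY_SECONDS = 30_000L;

    // Old shape: branch on the peer's wire version, defaulting fields that legacy
    // peers never sent. Once old peers are unsupported, only the first branch remains.
    static Request readFrom(DataInputStream in, int senderVersion) throws IOException {
        if (senderVersion >= VERSION_WITH_TIMEOUTS) {
            return new Request(in.readLong(), in.readLong(), in.readUTF());
        } else {
            return new Request(THIRTY_SECONDS, THIRTY_SECONDS, in.readUTF());
        }
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bytes)) {
            out.writeLong(10_000L); // master node timeout
            out.writeLong(10_000L); // ack timeout
            out.writeUTF("node-1");
        }
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        // Prints: Request[masterTimeoutMillis=10000, ackTimeoutMillis=10000, nodeId=node-1]
        System.out.println(readFrom(in, VERSION_WITH_TIMEOUTS));
    }
}
```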
--- .../shutdown/DeleteShutdownNodeAction.java | 25 +-------- .../shutdown/GetShutdownStatusAction.java | 25 +-------- .../xpack/shutdown/PutShutdownNodeAction.java | 55 ++----------------- .../xpack/shutdown/ShutdownPlugin.java | 10 ---- .../TransportDeleteShutdownNodeAction.java | 2 +- .../TransportGetShutdownStatusAction.java | 2 +- .../TransportPutShutdownNodeAction.java | 2 +- .../shutdown/DeleteShutdownRequestTests.java | 2 +- .../GetShutdownStatusRequestTests.java | 2 +- .../shutdown/PutShutdownRequestTests.java | 2 +- 10 files changed, 15 insertions(+), 112 deletions(-) diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java index 6f9621b4bdb2e..d9d7a5ddd3da2 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java @@ -15,12 +15,9 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; import java.io.IOException; -import static org.elasticsearch.xpack.shutdown.ShutdownPlugin.serializesWithParentTaskAndTimeouts; - public class DeleteShutdownNodeAction extends ActionType { public static final DeleteShutdownNodeAction INSTANCE = new DeleteShutdownNodeAction(); @@ -39,32 +36,14 @@ public Request(TimeValue masterNodeTimeout, TimeValue ackTimeout, String nodeId) this.nodeId = nodeId; } - @UpdateForV9 // inline when bwc no longer needed - public static Request readFrom(StreamInput in) throws IOException { - if (serializesWithParentTaskAndTimeouts(in.getTransportVersion())) { - return new Request(in); - } else { - return new Request(TimeValue.THIRTY_SECONDS, TimeValue.THIRTY_SECONDS, in); - } - } - - private Request(StreamInput in) throws IOException { + public Request(StreamInput in) throws IOException { super(in); - assert serializesWithParentTaskAndTimeouts(in.getTransportVersion()); this.nodeId = in.readString(); } - @UpdateForV9 // remove when bwc no longer needed - private Request(TimeValue masterNodeTimeout, TimeValue ackTimeout, StreamInput in) throws IOException { - this(masterNodeTimeout, ackTimeout, in.readString()); - assert serializesWithParentTaskAndTimeouts(in.getTransportVersion()) == false; - } - @Override public void writeTo(StreamOutput out) throws IOException { - if (serializesWithParentTaskAndTimeouts(out.getTransportVersion())) { - super.writeTo(out); - } + super.writeTo(out); out.writeString(this.nodeId); } diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java index 7e5498a7676ba..24d58d0d631d4 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.shutdown; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; @@ -19,7 +18,6 @@ import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import 
org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; @@ -50,33 +48,14 @@ public Request(TimeValue masterNodeTimeout, String... nodeIds) { this.nodeIds = nodeIds; } - @UpdateForV9 // only needed for bwc, inline in v9 - public static Request readFrom(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.GET_SHUTDOWN_STATUS_TIMEOUT)) { - return new Request(in); - } else { - return new Request(TimeValue.THIRTY_SECONDS, in); - } - } - - private Request(StreamInput in) throws IOException { + public Request(StreamInput in) throws IOException { super(in); - assert in.getTransportVersion().onOrAfter(TransportVersions.GET_SHUTDOWN_STATUS_TIMEOUT); - nodeIds = in.readStringArray(); - } - - @UpdateForV9 // only needed for bwc, remove in v9 - private Request(TimeValue masterNodeTimeout, StreamInput in) throws IOException { - super(masterNodeTimeout); - assert in.getTransportVersion().before(TransportVersions.GET_SHUTDOWN_STATUS_TIMEOUT); nodeIds = in.readStringArray(); } @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.GET_SHUTDOWN_STATUS_TIMEOUT)) { - super.writeTo(out); - } + super.writeTo(out); out.writeStringArray(this.nodeIds); } diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java index bb489f337d02f..88be34d215ed8 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java @@ -17,17 +17,13 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import static org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata.GRACE_PERIOD_ADDED_VERSION; -import static org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata.REPLACE_SHUTDOWN_TYPE_ADDED_VERSION; import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xpack.shutdown.ShutdownPlugin.serializesWithParentTaskAndTimeouts; public class PutShutdownNodeAction extends ActionType { @@ -112,18 +108,8 @@ public Request( this.gracePeriod = gracePeriod; } - @UpdateForV9 // inline when bwc no longer needed - public static Request readFrom(StreamInput in) throws IOException { - if (serializesWithParentTaskAndTimeouts(in.getTransportVersion())) { - return new Request(in); - } else { - return new Request(TimeValue.THIRTY_SECONDS, TimeValue.THIRTY_SECONDS, in); - } - } - - private Request(StreamInput in) throws IOException { + public Request(StreamInput in) throws IOException { super(in); - assert serializesWithParentTaskAndTimeouts(in.getTransportVersion()); this.nodeId = in.readString(); this.type = in.readEnum(SingleNodeShutdownMetadata.Type.class); this.reason = in.readString(); @@ -132,46 +118,15 @@ private Request(StreamInput in) throws IOException { this.gracePeriod = 
in.readOptionalTimeValue(); } - @UpdateForV9 // remove when bwc no longer needed - private Request(TimeValue masterNodeTimeout, TimeValue ackTimeout, StreamInput in) throws IOException { - super(masterNodeTimeout, ackTimeout); - assert serializesWithParentTaskAndTimeouts(in.getTransportVersion()) == false; - this.nodeId = in.readString(); - this.type = in.readEnum(SingleNodeShutdownMetadata.Type.class); - this.reason = in.readString(); - this.allocationDelay = in.readOptionalTimeValue(); - if (in.getTransportVersion().onOrAfter(REPLACE_SHUTDOWN_TYPE_ADDED_VERSION)) { - this.targetNodeName = in.readOptionalString(); - } else { - this.targetNodeName = null; - } - if (in.getTransportVersion().onOrAfter(GRACE_PERIOD_ADDED_VERSION)) { - this.gracePeriod = in.readOptionalTimeValue(); - } else { - this.gracePeriod = null; - } - } - @Override public void writeTo(StreamOutput out) throws IOException { - if (serializesWithParentTaskAndTimeouts(out.getTransportVersion())) { - super.writeTo(out); - } + super.writeTo(out); out.writeString(nodeId); - if (out.getTransportVersion().before(REPLACE_SHUTDOWN_TYPE_ADDED_VERSION) - && this.type == SingleNodeShutdownMetadata.Type.REPLACE) { - out.writeEnum(SingleNodeShutdownMetadata.Type.REMOVE); - } else { - out.writeEnum(type); - } + out.writeEnum(type); out.writeString(reason); out.writeOptionalTimeValue(allocationDelay); - if (out.getTransportVersion().onOrAfter(REPLACE_SHUTDOWN_TYPE_ADDED_VERSION)) { - out.writeOptionalString(targetNodeName); - } - if (out.getTransportVersion().onOrAfter(GRACE_PERIOD_ADDED_VERSION)) { - out.writeOptionalTimeValue(gracePeriod); - } + out.writeOptionalString(targetNodeName); + out.writeOptionalTimeValue(gracePeriod); } public String getNodeId() { diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/ShutdownPlugin.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/ShutdownPlugin.java index 621836cea9f89..25c6f431e57c8 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/ShutdownPlugin.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/ShutdownPlugin.java @@ -6,8 +6,6 @@ */ package org.elasticsearch.xpack.shutdown; -import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; @@ -18,7 +16,6 @@ import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; @@ -72,11 +69,4 @@ public List getRestHandlers( ) { return Arrays.asList(new RestPutShutdownNodeAction(), new RestDeleteShutdownNodeAction(), new RestGetShutdownStatusAction()); } - - @UpdateForV9 // always true in v9 so can be removed - static boolean serializesWithParentTaskAndTimeouts(TransportVersion transportVersion) { - return transportVersion.isPatchFrom(TransportVersions.V_8_13_4) - || transportVersion.isPatchFrom(TransportVersions.V_8_14_0) - || transportVersion.onOrAfter(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX); - } } diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportDeleteShutdownNodeAction.java 
b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportDeleteShutdownNodeAction.java index d66f2cbddd182..50742c1c573fb 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportDeleteShutdownNodeAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportDeleteShutdownNodeAction.java @@ -120,7 +120,7 @@ public TransportDeleteShutdownNodeAction( clusterService, threadPool, actionFilters, - Request::readFrom, + Request::new, indexNameExpressionResolver, EsExecutors.DIRECT_EXECUTOR_SERVICE ); diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusAction.java index 33a285128e08c..4f2a738873ff9 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportGetShutdownStatusAction.java @@ -88,7 +88,7 @@ public TransportGetShutdownStatusAction( clusterService, threadPool, actionFilters, - GetShutdownStatusAction.Request::readFrom, + GetShutdownStatusAction.Request::new, indexNameExpressionResolver, GetShutdownStatusAction.Response::new, threadPool.executor(ThreadPool.Names.MANAGEMENT) diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java index 4e1b8c3cf3b9a..4c163bc1a9361 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/TransportPutShutdownNodeAction.java @@ -156,7 +156,7 @@ public TransportPutShutdownNodeAction( clusterService, threadPool, actionFilters, - Request::readFrom, + Request::new, indexNameExpressionResolver, EsExecutors.DIRECT_EXECUTOR_SERVICE ); diff --git a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/DeleteShutdownRequestTests.java b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/DeleteShutdownRequestTests.java index 573b4077c263f..cb2b8b5a706ce 100644 --- a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/DeleteShutdownRequestTests.java +++ b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/DeleteShutdownRequestTests.java @@ -33,7 +33,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override protected Writeable.Reader instanceReader() { return in -> { - final var request = DeleteShutdownNodeAction.Request.readFrom(in); + final var request = new DeleteShutdownNodeAction.Request(in); return new RequestWrapper(request.getNodeId(), request.getParentTask(), request.masterNodeTimeout(), request.ackTimeout()); }; } diff --git a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusRequestTests.java b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusRequestTests.java index 166bec9ec5f62..db7a132a68f4d 100644 --- a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusRequestTests.java +++ b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusRequestTests.java @@ -19,7 +19,7 @@ public class GetShutdownStatusRequestTests extends 
AbstractWireSerializingTestCa @Override protected Writeable.Reader instanceReader() { - return GetShutdownStatusAction.Request::readFrom; + return GetShutdownStatusAction.Request::new; } @Override diff --git a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/PutShutdownRequestTests.java b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/PutShutdownRequestTests.java index 806c1ae832958..c4ad0608328c6 100644 --- a/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/PutShutdownRequestTests.java +++ b/x-pack/plugin/shutdown/src/test/java/org/elasticsearch/xpack/shutdown/PutShutdownRequestTests.java @@ -53,7 +53,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override protected Writeable.Reader instanceReader() { return in -> { - final var request = PutShutdownNodeAction.Request.readFrom(in); + final var request = new PutShutdownNodeAction.Request(in); return new RequestWrapper( request.getNodeId(), request.getType(), From a14f529fd9a593f818e979f3447c2cdc2291db89 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Thu, 12 Sep 2024 11:36:42 +0100 Subject: [PATCH 55/58] Update last few references in yaml tests from ROOT locale to ENGLISH (#112791) --- .../rest-api-spec/test/runtime_fields/10_keyword.yml | 4 ++-- .../test/runtime_fields/13_keyword_calculated_at_index.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/runtime-fields-common/src/yamlRestTest/resources/rest-api-spec/test/runtime_fields/10_keyword.yml b/modules/runtime-fields-common/src/yamlRestTest/resources/rest-api-spec/test/runtime_fields/10_keyword.yml index 11214907eb17e..8728d4ac413b7 100644 --- a/modules/runtime-fields-common/src/yamlRestTest/resources/rest-api-spec/test/runtime_fields/10_keyword.yml +++ b/modules/runtime-fields-common/src/yamlRestTest/resources/rest-api-spec/test/runtime_fields/10_keyword.yml @@ -19,7 +19,7 @@ setup: script: | Instant instant = Instant.ofEpochMilli(params._source.timestamp); ZonedDateTime dt = ZonedDateTime.ofInstant(instant, ZoneId.of("UTC")); - emit(dt.dayOfWeek.getDisplayName(TextStyle.FULL, Locale.ROOT)); + emit(dt.dayOfWeek.getDisplayName(TextStyle.FULL, Locale.ENGLISH)); # Test fetching many values day_of_week_letters: type: keyword @@ -218,7 +218,7 @@ setup: day_of_week: type: keyword script: | - emit(doc['timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.SHORT, Locale.ROOT)); + emit(doc['timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.SHORT, Locale.ENGLISH)); - do: search: index: sensor diff --git a/modules/runtime-fields-common/src/yamlRestTest/resources/rest-api-spec/test/runtime_fields/13_keyword_calculated_at_index.yml b/modules/runtime-fields-common/src/yamlRestTest/resources/rest-api-spec/test/runtime_fields/13_keyword_calculated_at_index.yml index 4bedfa3e923a8..c27ddab72bff4 100644 --- a/modules/runtime-fields-common/src/yamlRestTest/resources/rest-api-spec/test/runtime_fields/13_keyword_calculated_at_index.yml +++ b/modules/runtime-fields-common/src/yamlRestTest/resources/rest-api-spec/test/runtime_fields/13_keyword_calculated_at_index.yml @@ -28,7 +28,7 @@ setup: script: | Instant instant = Instant.ofEpochMilli(params._source.timestamp); ZonedDateTime dt = ZonedDateTime.ofInstant(instant, ZoneId.of("UTC")); - emit(dt.dayOfWeek.getDisplayName(TextStyle.FULL, Locale.ROOT)); + emit(dt.dayOfWeek.getDisplayName(TextStyle.FULL, Locale.ENGLISH)); # Test fetching many values day_of_week_letters: type: keyword From 7aa98eff4be4cc1b6baca859124f5a307adb674e Mon Sep 17 
00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Thu, 12 Sep 2024 13:11:20 +0200 Subject: [PATCH 56/58] Remove adaptive allocations feature flag (#112798) --- .../test/cluster/FeatureFlag.java | 7 +----- .../StartTrainedModelDeploymentAction.java | 15 +++++------- .../UpdateTrainedModelDeploymentAction.java | 15 +++++------- .../AdaptiveAllocationsFeatureFlag.java | 24 ------------------- .../integration/ModelRegistryIT.java | 2 -- .../inference/services/ServiceUtils.java | 4 ---- .../ElasticsearchInternalServiceSettings.java | 19 +++++---------- .../inference/services/ServiceUtilsTests.java | 5 ---- .../xpack/ml/MlInitializationService.java | 9 ++----- 9 files changed, 21 insertions(+), 79 deletions(-) delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/AdaptiveAllocationsFeatureFlag.java diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index a8a33da27aebe..49fb38b518dce 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -16,12 +16,7 @@ */ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), - FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null), - INFERENCE_ADAPTIVE_ALLOCATIONS_ENABLED( - "es.inference_adaptive_allocations_feature_flag_enabled=true", - Version.fromString("8.16.0"), - null - ); + FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null); public final String systemProperty; public final Version from; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java index 7ae9a4a4b2d58..4a570bfde99a4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java @@ -29,7 +29,6 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ml.MlConfigVersion; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; -import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsFeatureFlag; import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; import org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus; import org.elasticsearch.xpack.core.ml.inference.assignment.Priority; @@ -120,14 +119,12 @@ public static class Request extends MasterNodeRequest implements ToXCon ObjectParser.ValueType.VALUE ); PARSER.declareString(Request::setPriority, PRIORITY); - if (AdaptiveAllocationsFeatureFlag.isEnabled()) { - PARSER.declareObjectOrNull( - Request::setAdaptiveAllocationsSettings, - (p, c) -> AdaptiveAllocationsSettings.PARSER.parse(p, c).build(), - null, - ADAPTIVE_ALLOCATIONS - ); - } + PARSER.declareObjectOrNull( + Request::setAdaptiveAllocationsSettings, + (p, c) -> AdaptiveAllocationsSettings.PARSER.parse(p, c).build(), + null, + ADAPTIVE_ALLOCATIONS + ); } public static Request parseRequest(String modelId, String 
deploymentId, XContentParser parser) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java index 61601c402e8e7..7fca223b2ee7e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java @@ -20,7 +20,6 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsFeatureFlag; import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -50,14 +49,12 @@ public static class Request extends AcknowledgedRequest implements ToXC static { PARSER.declareString(Request::setDeploymentId, MODEL_ID); PARSER.declareInt(Request::setNumberOfAllocations, NUMBER_OF_ALLOCATIONS); - if (AdaptiveAllocationsFeatureFlag.isEnabled()) { - PARSER.declareObjectOrNull( - Request::setAdaptiveAllocationsSettings, - (p, c) -> AdaptiveAllocationsSettings.PARSER.parse(p, c).build(), - AdaptiveAllocationsSettings.RESET_PLACEHOLDER, - ADAPTIVE_ALLOCATIONS - ); - } + PARSER.declareObjectOrNull( + Request::setAdaptiveAllocationsSettings, + (p, c) -> AdaptiveAllocationsSettings.PARSER.parse(p, c).build(), + AdaptiveAllocationsSettings.RESET_PLACEHOLDER, + ADAPTIVE_ALLOCATIONS + ); PARSER.declareString((r, val) -> r.ackTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/AdaptiveAllocationsFeatureFlag.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/AdaptiveAllocationsFeatureFlag.java deleted file mode 100644 index a3b508c0534f9..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/AdaptiveAllocationsFeatureFlag.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.core.ml.inference.assignment; - -import org.elasticsearch.common.util.FeatureFlag; - -/** - * semantic_text feature flag. When the feature is complete, this flag will be removed. 
- */ -public class AdaptiveAllocationsFeatureFlag { - - private AdaptiveAllocationsFeatureFlag() {} - - private static final FeatureFlag FEATURE_FLAG = new FeatureFlag("inference_adaptive_allocations"); - - public static boolean isEnabled() { - return FEATURE_FLAG.isEnabled(); - } -} diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index d776f3963c2ca..5157683f2dce9 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -25,7 +25,6 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsFeatureFlag; import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.services.elser.ElserInternalModel; @@ -102,7 +101,6 @@ public void testStoreModelWithUnknownFields() throws Exception { } public void testGetModel() throws Exception { - assumeTrue("Only if 'inference_adaptive_allocations' feature flag is enabled", AdaptiveAllocationsFeatureFlag.isEnabled()); String inferenceEntityId = "test-get-model"; Model model = buildElserModelConfig(inferenceEntityId, TaskType.SPARSE_EMBEDDING); AtomicReference putModelHolder = new AtomicReference<>(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index 7e46dcfea7592..c75ded629605f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -24,7 +24,6 @@ import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.TextEmbedding; -import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsFeatureFlag; import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; import org.elasticsearch.xpack.inference.services.settings.ApiKeySecrets; @@ -137,9 +136,6 @@ public static AdaptiveAllocationsSettings removeAsAdaptiveAllocationsSettings( String key, ValidationException validationException ) { - if (AdaptiveAllocationsFeatureFlag.isEnabled() == false) { - return null; - } Map settingsMap = ServiceUtils.removeFromMap(sourceMap, key); if (settingsMap == null) { return null; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java index 8de791325a6df..1acf19c5373b7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java @@ -16,7 +16,6 @@ import org.elasticsearch.inference.ServiceSettings; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsFeatureFlag; import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; import org.elasticsearch.xpack.inference.services.ServiceUtils; @@ -88,18 +87,12 @@ protected static ElasticsearchInternalServiceSettings.Builder fromMap( String modelId = extractOptionalString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); if (numAllocations == null && adaptiveAllocationsSettings == null) { - if (AdaptiveAllocationsFeatureFlag.isEnabled()) { - validationException.addValidationError( - ServiceUtils.missingOneOfSettingsErrorMsg( - List.of(NUM_ALLOCATIONS, ADAPTIVE_ALLOCATIONS), - ModelConfigurations.SERVICE_SETTINGS - ) - ); - } else { - validationException.addValidationError( - ServiceUtils.missingSettingErrorMsg(NUM_ALLOCATIONS, ModelConfigurations.SERVICE_SETTINGS) - ); - } + validationException.addValidationError( + ServiceUtils.missingOneOfSettingsErrorMsg( + List.of(NUM_ALLOCATIONS, ADAPTIVE_ALLOCATIONS), + ModelConfigurations.SERVICE_SETTINGS + ) + ); } // if an error occurred while parsing, we'll set these to an invalid value, so we don't accidentally get a diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java index 76f095236af8a..86af5e431d78d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; -import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsFeatureFlag; import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsSettings; import org.elasticsearch.xpack.inference.results.InferenceTextEmbeddingByteResultsTests; import org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests; @@ -290,8 +289,6 @@ public void testRemoveAsOneOfTypesMissingReturnsNull() { } public void testRemoveAsAdaptiveAllocationsSettings() { - assumeTrue("Should only run if adaptive allocations feature flag is enabled", AdaptiveAllocationsFeatureFlag.isEnabled()); - Map map = new HashMap<>( Map.of("settings", new HashMap<>(Map.of("enabled", true, "min_number_of_allocations", 7, "max_number_of_allocations", 42))) ); @@ -314,8 +311,6 @@ public void testRemoveAsAdaptiveAllocationsSettings() { } public void testRemoveAsAdaptiveAllocationsSettings_exceptions() { - assumeTrue("Should only run if adaptive allocations feature flag is enabled", AdaptiveAllocationsFeatureFlag.isEnabled()); - Map map = new HashMap<>( Map.of("settings", new HashMap<>(Map.of("enabled", "YES!", "blah", 42, "max_number_of_allocations", -7))) ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java index 2b3ed3f7a656c..98dfb13d9e3e4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java @@ -33,7 +33,6 @@ import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.annotations.AnnotationIndex; -import org.elasticsearch.xpack.core.ml.inference.assignment.AdaptiveAllocationsFeatureFlag; import org.elasticsearch.xpack.ml.inference.adaptiveallocations.AdaptiveAllocationsScalerService; import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; @@ -126,17 +125,13 @@ public void beforeStop() { public void onMaster() { mlDailyMaintenanceService.start(); - if (AdaptiveAllocationsFeatureFlag.isEnabled()) { - adaptiveAllocationsScalerService.start(); - } + adaptiveAllocationsScalerService.start(); threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(this::makeMlInternalIndicesHidden); } public void offMaster() { mlDailyMaintenanceService.stop(); - if (AdaptiveAllocationsFeatureFlag.isEnabled()) { - adaptiveAllocationsScalerService.stop(); - } + adaptiveAllocationsScalerService.stop(); } @Override From 352dd890d99f27b330b739356b7ab41718af5057 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Thu, 12 Sep 2024 12:54:01 +0100 Subject: [PATCH 57/58] address comments --- .../MachineLearningPackageLoader.java | 2 +- .../ml/packageloader/action/ModelImporter.java | 16 ++++++++-------- .../packageloader/action/ModelLoaderUtils.java | 6 +++--- .../packageloader/action/ModelImporterTests.java | 8 ++++---- 4 files changed, 16 insertions(+), 16 deletions(-) diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java index 4a55f7e3579f5..a63d911e9d40d 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/MachineLearningPackageLoader.java @@ -91,7 +91,7 @@ public List<ExecutorBuilder<?>> getExecutorBuilders(Settings settings) { } public static FixedExecutorBuilder modelDownloadExecutor(Settings settings) { - // Threadpool with a fixed size of 1 thread for + // Threadpool with a fixed number of threads for // downloading the model definition files return new FixedExecutorBuilder( settings, diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java index 114544bc8e74e..1cac0a1add7d8 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java @@ -102,9 +102,9 @@ private void doImportInternal(ActionListener finalListener if (ModelLoaderUtils.uriIsFile(uri) == false) { var ranges = ModelLoaderUtils.split(config.getSize(), NUMBER_OF_STREAMS, DEFAULT_CHUNK_SIZE); - var downloaders = new ArrayList<ModelLoaderUtils.HttStreamChunker>(ranges.size()); + var downloaders = new ArrayList<ModelLoaderUtils.HttpStreamChunker>(ranges.size()); for (var range : ranges) {
- downloaders.add(new ModelLoaderUtils.HttStreamChunker(uri, range, DEFAULT_CHUNK_SIZE)); + downloaders.add(new ModelLoaderUtils.HttpStreamChunker(uri, range, DEFAULT_CHUNK_SIZE)); } downloadModelDefinition(config.getSize(), totalParts, vocabularyParts, downloaders, finalListener); } else { @@ -125,7 +125,7 @@ void downloadModelDefinition( long size, int totalParts, @Nullable ModelLoaderUtils.VocabularyParts vocabularyParts, - List<ModelLoaderUtils.HttStreamChunker> downloaders, + List<ModelLoaderUtils.HttpStreamChunker> downloaders, ActionListener finalListener ) { try (var countingListener = new RefCountingListener(1, ActionListener.wrap(ignore -> executorService.execute(() -> { @@ -156,7 +156,7 @@ private void downloadPartInRange( long size, int totalParts, - ModelLoaderUtils.HttStreamChunker downloadChunker, + ModelLoaderUtils.HttpStreamChunker downloadChunker, ExecutorService executorService, RefCountingListener countingListener, ActionListener rangeFullyDownloadedListener @@ -207,7 +207,7 @@ private void downloadFinalPart( long size, int totalParts, - ModelLoaderUtils.HttStreamChunker downloader, + ModelLoaderUtils.HttpStreamChunker downloader, ActionListener lastPartWrittenListener ) { assert ThreadPool.assertCurrentThreadPool(MachineLearningPackageLoader.MODEL_DOWNLOAD_THREADPOOL_NAME) @@ -284,9 +284,9 @@ private void indexPart(int partIndex, int totalParts, long totalSize, BytesArray client.execute(PutTrainedModelDefinitionPartAction.INSTANCE, modelPartRequest, listener); } - private void checkDownloadComplete(List<ModelLoaderUtils.HttStreamChunker> downloaders) { - long totalBytesRead = downloaders.stream().mapToLong(ModelLoaderUtils.HttStreamChunker::getTotalBytesRead).sum(); - int totalParts = downloaders.stream().mapToInt(ModelLoaderUtils.HttStreamChunker::getCurrentPart).sum(); + private void checkDownloadComplete(List<ModelLoaderUtils.HttpStreamChunker> downloaders) { + long totalBytesRead = downloaders.stream().mapToLong(ModelLoaderUtils.HttpStreamChunker::getTotalBytesRead).sum(); + int totalParts = downloaders.stream().mapToInt(ModelLoaderUtils.HttpStreamChunker::getCurrentPart).sum(); checkSize(totalBytesRead); logger.debug(format("finished importing model [%s] using [%d] parts", modelId, totalParts)); } diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java index 3815ea4ac79a4..42bfbb249b623 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java @@ -73,7 +73,7 @@ public String bytesRange() { } } - static class HttStreamChunker { + static class HttpStreamChunker { record BytesAndPartIndex(BytesArray bytes, int partIndex) {} @@ -83,7 +83,7 @@ record BytesAndPartIndex(BytesArray bytes, int partIndex) {} private final AtomicInteger currentPart; private final int lastPartNumber; - HttStreamChunker(URI uri, RequestRange range, int chunkSize) { + HttpStreamChunker(URI uri, RequestRange range, int chunkSize) { var inputStream = getHttpOrHttpsInputStream(uri, range); this.inputStream = inputStream; this.chunkSize = chunkSize; @@ -92,7 +92,7 @@ record BytesAndPartIndex(BytesArray bytes, int partIndex) {} } // This ctor exists for testing purposes only.
- HttStreamChunker(InputStream inputStream, RequestRange range, int chunkSize) { + HttpStreamChunker(InputStream inputStream, RequestRange range, int chunkSize) { this.inputStream = inputStream; this.chunkSize = chunkSize; this.lastPartNumber = range.startPart() + range.numParts(); diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java index fa9398955c271..99efb331a350c 100644 --- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java +++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporterTests.java @@ -210,7 +210,7 @@ public void testReadFailure() throws IOException, InterruptedException, URISynta int chunkSize = 10; long size = totalParts * chunkSize; - var streamer = mock(ModelLoaderUtils.HttStreamChunker.class); + var streamer = mock(ModelLoaderUtils.HttpStreamChunker.class); when(streamer.hasNext()).thenReturn(true); when(streamer.next()).thenThrow(new IOException("stream failed")); // fail the read @@ -258,14 +258,14 @@ public void testUploadVocabFailure() throws InterruptedException, URISyntaxExcep verify(client, never()).execute(eq(PutTrainedModelDefinitionPartAction.INSTANCE), any(), any()); } - private List<ModelLoaderUtils.HttStreamChunker> mockHttpStreamChunkers(byte[] modelDef, int chunkSize, int numStreams) { + private List<ModelLoaderUtils.HttpStreamChunker> mockHttpStreamChunkers(byte[] modelDef, int chunkSize, int numStreams) { var ranges = ModelLoaderUtils.split(modelDef.length, numStreams, chunkSize); - var result = new ArrayList<ModelLoaderUtils.HttStreamChunker>(ranges.size()); + var result = new ArrayList<ModelLoaderUtils.HttpStreamChunker>(ranges.size()); for (var range : ranges) { int len = range.numParts() * chunkSize; var modelDefStream = new ByteArrayInputStream(modelDef, (int) range.rangeStart(), len); - result.add(new ModelLoaderUtils.HttStreamChunker(modelDefStream, range, chunkSize)); + result.add(new ModelLoaderUtils.HttpStreamChunker(modelDefStream, range, chunkSize)); } return result; From d0fbe17b25d969c957bb3451a5fb5164b5ba0352 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Thu, 12 Sep 2024 21:32:37 +0100 Subject: [PATCH 58/58] fix recovery from failure --- .../xpack/ml/packageloader/action/ModelImporter.java | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java index 1cac0a1add7d8..86711804ed03c 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelImporter.java @@ -31,7 +31,6 @@ import java.util.List; import java.util.Objects; import java.util.concurrent.ExecutorService; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import static org.elasticsearch.core.Strings.format; @@ -60,7 +59,6 @@ public class ModelImporter { private final ModelPackageConfig config; private final ModelDownloadTask task; private final ExecutorService executorService; - private final AtomicBoolean listenerIsClosed = new AtomicBoolean(false); private final AtomicInteger progressCounter = new AtomicInteger(); private final URI uri; @@
-169,9 +167,7 @@ private void downloadPartInRange( ); if (countingListener.isFailing()) { - if (listenerIsClosed.compareAndSet(false, true)) { - countingListener.close(); - } + rangeFullyDownloadedListener.onResponse(null); // the error has already been reported elsewhere return; } @@ -182,10 +178,8 @@ private void downloadPartInRange( indexPart(bytesAndIndex.partIndex(), totalParts, size, bytesAndIndex.bytes(), countingListener.acquire(ack -> {})); } catch (Exception e) { - countingListener.acquire().onFailure(e); - if (listenerIsClosed.compareAndSet(false, true)) { - countingListener.close(); - } + rangeFullyDownloadedListener.onFailure(e); + return; } if (downloadChunker.hasNext()) {