diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/QuestionAnsweringProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/QuestionAnsweringProcessor.java
index 05a470c6b08b5..9c912b5a08e59
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/QuestionAnsweringProcessor.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/QuestionAnsweringProcessor.java
@@ -125,7 +125,7 @@ public InferenceResults processResult(TokenizationResult tokenization, PyTorchIn
         if (pyTorchResult.getInferenceResult().length % 2 != 0) {
             throw new ElasticsearchStatusException(
                 "question answering result has invalid dimension, number of dimensions must be a multiple of 2 found [{}]",
-                RestStatus.INTERNAL_SERVER_ERROR,
+                RestStatus.CONFLICT,
                 pyTorchResult.getInferenceResult().length
             );
         }
@@ -138,7 +138,7 @@ public InferenceResults processResult(TokenizationResult tokenization, PyTorchIn
         if (numberOfSpans != tokensList.size()) {
             throw new ElasticsearchStatusException(
                 "question answering result has invalid dimensions; the number of spans [{}] does not match batched token size [{}]",
-                RestStatus.INTERNAL_SERVER_ERROR,
+                RestStatus.CONFLICT,
                 numberOfSpans,
                 tokensList.size()
             );
         }
@@ -153,7 +153,7 @@ public InferenceResults processResult(TokenizationResult tokenization, PyTorchIn
         if (starts.length != ends.length) {
             throw new ElasticsearchStatusException(
                 "question answering result has invalid dimensions; start positions [{}] must equal potential end [{}]",
-                RestStatus.INTERNAL_SERVER_ERROR,
+                RestStatus.CONFLICT,
                 starts.length,
                 ends.length
             );
         }
@@ -222,7 +222,7 @@ static void topScores(
         if (start.length != end.length) {
             throw new ElasticsearchStatusException(
                 "question answering result has invalid dimensions; possible start tokens [{}] must equal possible end tokens [{}]",
-                RestStatus.INTERNAL_SERVER_ERROR,
+                RestStatus.CONFLICT,
                 start.length,
                 end.length
             );
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java
index 3db3e0e999106..7938a6ef6d253
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java
@@ -99,7 +99,7 @@ static InferenceResults processResult(
         if (result.length != labels.size()) {
             throw new ElasticsearchStatusException(
                 "Expected exactly [{}] values in text classification result; got [{}]",
-                RestStatus.INTERNAL_SERVER_ERROR,
+                RestStatus.CONFLICT,
                 labels.size(),
                 result.length
             );
         }
@@ -108,7 +108,7 @@ static InferenceResults processResult(
         Map<Integer, List<TokenizationResult.Tokens>> windowedSeq = tokenization.getTokensBySequenceId();
         // TODO adjust logic when batch is allowed
         if (windowedSeq.size() > 1) {
-            throw new ElasticsearchStatusException("Unexpected batch input for text classification", RestStatus.INTERNAL_SERVER_ERROR);
+            throw new ElasticsearchStatusException("Unexpected batch input for text classification", RestStatus.CONFLICT);
         }
         double[] normalizedScores = new double[labels.size()];
         for (int i = 0; i < pyTorchResult.getInferenceResult()[0].length; i++) {
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextSimilarityProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextSimilarityProcessor.java
index c7074f8e7285e..53f41d4af1f81
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextSimilarityProcessor.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextSimilarityProcessor.java
@@ -99,7 +99,7 @@ public InferenceResults processResult(TokenizationResult tokenization, PyTorchIn
         if (result.length != 1) {
             throw new ElasticsearchStatusException(
                 "Expected exactly [1] value in text_similarity result; got [{}]",
-                RestStatus.INTERNAL_SERVER_ERROR,
+                RestStatus.CONFLICT,
                 result.length
             );
         }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessor.java
index 0d3441315700d..c102bc845982b
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessor.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessor.java
@@ -147,7 +147,7 @@ public InferenceResults processResult(TokenizationResult tokenization, PyTorchIn
         if (pyTorchResult.getInferenceResult()[0].length != labels.length) {
             throw new ElasticsearchStatusException(
                 "Expected exactly [{}] values in zero shot classification result; got [{}]",
-                RestStatus.INTERNAL_SERVER_ERROR,
+                RestStatus.CONFLICT,
                 labels.length,
                 pyTorchResult.getInferenceResult().length
             );
         }
@@ -160,7 +160,7 @@ public InferenceResults processResult(TokenizationResult tokenization, PyTorchIn
         if (vals.length != 3) {
             throw new ElasticsearchStatusException(
                 "Expected exactly [{}] values in inner zero shot classification result; got [{}]",
-                RestStatus.INTERNAL_SERVER_ERROR,
+                RestStatus.CONFLICT,
                 3,
                 vals.length
             );
         }
@@ -177,7 +177,7 @@ public InferenceResults processResult(TokenizationResult tokenization, PyTorchIn
         if (vals.length != 3) {
             throw new ElasticsearchStatusException(
                 "Expected exactly [{}] values in inner zero shot classification result; got [{}]",
-                RestStatus.INTERNAL_SERVER_ERROR,
+                RestStatus.CONFLICT,
                 3,
                 vals.length
             );