From 9dd6cefec9d189dbce5ce1df6b76c913d68ddb10 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Mon, 24 Mar 2025 09:13:12 -0700 Subject: [PATCH] Move inference examples to appropriate task types (#4049) (cherry picked from commit ae85ae34f0aa4147d6da8c90351e34c3fa42f1fe) --- output/openapi/elasticsearch-openapi.json | 56 +++++++++++++++++++ .../elasticsearch-serverless-openapi.json | 56 +++++++++++++++++++ output/schema/schema-serverless.json | 56 +++++++++++++++++++ output/schema/schema.json | 56 +++++++++++++++++++ .../request/CompletionRequestExample1.yaml} | 0 .../response/CompletionResponseExample1.yaml} | 0 .../request/RerankRequestExample1.yaml} | 0 .../response/RerankResponseExample1.yaml} | 0 .../SparseEmbeddingRequestExample1.yaml} | 0 .../SparseEmbeddingResponseExample1.yaml} | 0 .../TextEmbeddingRequestExample1.yaml} | 0 .../TextEmbeddingResponseExample1.yaml} | 0 12 files changed, 224 insertions(+) rename specification/inference/{inference/examples/request/InferenceRequestExample1.yaml => completion/examples/request/CompletionRequestExample1.yaml} (100%) rename specification/inference/{inference/examples/response/InferenceResponseExample1.yaml => completion/examples/response/CompletionResponseExample1.yaml} (100%) rename specification/inference/{inference/examples/request/InferenceRequestExample2.yaml => rerank/examples/request/RerankRequestExample1.yaml} (100%) rename specification/inference/{inference/examples/response/InferenceResponseExample2.yaml => rerank/examples/response/RerankResponseExample1.yaml} (100%) rename specification/inference/{inference/examples/request/InferenceRequestExample3.yaml => sparse_embedding/examples/request/SparseEmbeddingRequestExample1.yaml} (100%) rename specification/inference/{inference/examples/response/InferenceResponseExample3.yaml => sparse_embedding/examples/response/SparseEmbeddingResponseExample1.yaml} (100%) rename specification/inference/{inference/examples/request/InferenceRequestExample4.yaml => text_embedding/examples/request/TextEmbeddingRequestExample1.yaml} (100%) rename specification/inference/{inference/examples/response/InferenceResponseExample4.yaml => text_embedding/examples/response/TextEmbeddingResponseExample1.yaml} (100%) diff --git a/output/openapi/elasticsearch-openapi.json b/output/openapi/elasticsearch-openapi.json index 6ff9c9a51c..499fee58ea 100644 --- a/output/openapi/elasticsearch-openapi.json +++ b/output/openapi/elasticsearch-openapi.json @@ -17627,6 +17627,13 @@ "required": [ "input" ] + }, + "examples": { + "CompletionRequestExample1": { + "summary": "Completion task", + "description": "Run `POST _inference/completion/openai_chat_completions` to perform a completion on the example question.", + "value": "{\n \"input\": \"What is Elastic?\"\n}" + } } } } @@ -17638,6 +17645,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:CompletionInferenceResult" + }, + "examples": { + "CompletionResponseExample1": { + "summary": "Completion task", + "description": "A successful response from `POST _inference/completion/openai_chat_completions`.\n", + "value": "{\n \"completion\": [\n {\n \"result\": \"Elastic is a company that provides a range of software solutions for search, logging, security, and analytics. Their flagship product is Elasticsearch, an open-source, distributed search engine that allows users to search, analyze, and visualize large volumes of data in real-time. 
Elastic also offers products such as Kibana, a data visualization tool, and Logstash, a log management and pipeline tool, as well as various other tools and solutions for data analysis and management.\"\n }\n ]\n}" + } } } } @@ -18223,6 +18237,13 @@ "query", "input" ] + }, + "examples": { + "RerankRequestExample1": { + "summary": "Rerank task", + "description": "Run `POST _inference/rerank/cohere_rerank` to perform reranking on the example input.", + "value": "{\n \"input\": [\"luke\", \"like\", \"leia\", \"chewy\",\"r2d2\", \"star\", \"wars\"],\n \"query\": \"star wars main character\"\n}" + } } } } @@ -18234,6 +18255,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:RerankedInferenceResult" + }, + "examples": { + "RerankResponseExample1": { + "summary": "Rerank task", + "description": "A successful response from `POST _inference/rerank/cohere_rerank`.\n", + "value": "{\n \"rerank\": [\n {\n \"index\": \"2\",\n \"relevance_score\": \"0.011597361\",\n \"text\": \"leia\"\n },\n {\n \"index\": \"0\",\n \"relevance_score\": \"0.006338922\",\n \"text\": \"luke\"\n },\n {\n \"index\": \"5\",\n \"relevance_score\": \"0.0016166499\",\n \"text\": \"star\"\n },\n {\n \"index\": \"4\",\n \"relevance_score\": \"0.0011695103\",\n \"text\": \"r2d2\"\n },\n {\n \"index\": \"1\",\n \"relevance_score\": \"5.614787E-4\",\n \"text\": \"like\"\n },\n {\n \"index\": \"6\",\n \"relevance_score\": \"3.7850367E-4\",\n \"text\": \"wars\"\n },\n {\n \"index\": \"3\",\n \"relevance_score\": \"1.2508839E-5\",\n \"text\": \"chewy\"\n }\n ]\n}" + } } } } @@ -18299,6 +18327,13 @@ "required": [ "input" ] + }, + "examples": { + "SparseEmbeddingRequestExample1": { + "summary": "Sparse embedding task", + "description": "Run `POST _inference/sparse_embedding/my-elser-model` to perform sparse embedding on the example sentence.", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\"\n}" + } } } } @@ -18310,6 +18345,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:SparseEmbeddingInferenceResult" + }, + "examples": { + "SparseEmbeddingResponseExample1": { + "summary": "Sparse embedding task", + "description": "An abbreviated response from `POST _inference/sparse_embedding/my-elser-model`.\n", + "value": "{\n \"sparse_embedding\": [\n {\n \"port\": 2.1259406,\n \"sky\": 1.7073475,\n \"color\": 1.6922266,\n \"dead\": 1.6247464,\n \"television\": 1.3525393,\n \"above\": 1.2425821,\n \"tuned\": 1.1440028,\n \"colors\": 1.1218185,\n \"tv\": 1.0111054,\n \"ports\": 1.0067928,\n \"poem\": 1.0042328,\n \"channel\": 0.99471164,\n \"tune\": 0.96235967,\n \"scene\": 0.9020516\n }\n ]\n}" + } } } } @@ -18449,6 +18491,13 @@ "required": [ "input" ] + }, + "examples": { + "TextEmbeddingRequestExample1": { + "summary": "Text embedding task", + "description": "Run `POST _inference/text_embedding/my-cohere-endpoint` to perform text embedding on the example sentence using the Cohere integration,", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\",\n \"task_settings\": {\n \"input_type\": \"ingest\"\n }\n}" + } } } } @@ -18460,6 +18509,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:TextEmbeddingInferenceResult" + }, + "examples": { + "TextEmbeddingResponseExample1": { + "summary": "Text embedding task", + "description": "An abbreviated response from `POST _inference/text_embedding/my-cohere-endpoint`.\n", + "value": "{\n 
\"text_embedding\": [\n {\n \"embedding\": [\n {\n 0.018569946,\n -0.036895752,\n 0.01486969,\n -0.0045204163,\n -0.04385376,\n 0.0075950623,\n 0.04260254,\n -0.004005432,\n 0.007865906,\n 0.030792236,\n -0.050476074,\n 0.011795044,\n -0.011642456,\n -0.010070801\n }\n ]\n }\n ]\n}" + } } } } diff --git a/output/openapi/elasticsearch-serverless-openapi.json b/output/openapi/elasticsearch-serverless-openapi.json index bb143450df..433057454f 100644 --- a/output/openapi/elasticsearch-serverless-openapi.json +++ b/output/openapi/elasticsearch-serverless-openapi.json @@ -9583,6 +9583,13 @@ "required": [ "input" ] + }, + "examples": { + "CompletionRequestExample1": { + "summary": "Completion task", + "description": "Run `POST _inference/completion/openai_chat_completions` to perform a completion on the example question.", + "value": "{\n \"input\": \"What is Elastic?\"\n}" + } } } } @@ -9594,6 +9601,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:CompletionInferenceResult" + }, + "examples": { + "CompletionResponseExample1": { + "summary": "Completion task", + "description": "A successful response from `POST _inference/completion/openai_chat_completions`.\n", + "value": "{\n \"completion\": [\n {\n \"result\": \"Elastic is a company that provides a range of software solutions for search, logging, security, and analytics. Their flagship product is Elasticsearch, an open-source, distributed search engine that allows users to search, analyze, and visualize large volumes of data in real-time. Elastic also offers products such as Kibana, a data visualization tool, and Logstash, a log management and pipeline tool, as well as various other tools and solutions for data analysis and management.\"\n }\n ]\n}" + } } } } @@ -10179,6 +10193,13 @@ "query", "input" ] + }, + "examples": { + "RerankRequestExample1": { + "summary": "Rerank task", + "description": "Run `POST _inference/rerank/cohere_rerank` to perform reranking on the example input.", + "value": "{\n \"input\": [\"luke\", \"like\", \"leia\", \"chewy\",\"r2d2\", \"star\", \"wars\"],\n \"query\": \"star wars main character\"\n}" + } } } } @@ -10190,6 +10211,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:RerankedInferenceResult" + }, + "examples": { + "RerankResponseExample1": { + "summary": "Rerank task", + "description": "A successful response from `POST _inference/rerank/cohere_rerank`.\n", + "value": "{\n \"rerank\": [\n {\n \"index\": \"2\",\n \"relevance_score\": \"0.011597361\",\n \"text\": \"leia\"\n },\n {\n \"index\": \"0\",\n \"relevance_score\": \"0.006338922\",\n \"text\": \"luke\"\n },\n {\n \"index\": \"5\",\n \"relevance_score\": \"0.0016166499\",\n \"text\": \"star\"\n },\n {\n \"index\": \"4\",\n \"relevance_score\": \"0.0011695103\",\n \"text\": \"r2d2\"\n },\n {\n \"index\": \"1\",\n \"relevance_score\": \"5.614787E-4\",\n \"text\": \"like\"\n },\n {\n \"index\": \"6\",\n \"relevance_score\": \"3.7850367E-4\",\n \"text\": \"wars\"\n },\n {\n \"index\": \"3\",\n \"relevance_score\": \"1.2508839E-5\",\n \"text\": \"chewy\"\n }\n ]\n}" + } } } } @@ -10255,6 +10283,13 @@ "required": [ "input" ] + }, + "examples": { + "SparseEmbeddingRequestExample1": { + "summary": "Sparse embedding task", + "description": "Run `POST _inference/sparse_embedding/my-elser-model` to perform sparse embedding on the example sentence.", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\"\n}" + } } } } @@ -10266,6 +10301,13 @@ 
"application/json": { "schema": { "$ref": "#/components/schemas/inference._types:SparseEmbeddingInferenceResult" + }, + "examples": { + "SparseEmbeddingResponseExample1": { + "summary": "Sparse embedding task", + "description": "An abbreviated response from `POST _inference/sparse_embedding/my-elser-model`.\n", + "value": "{\n \"sparse_embedding\": [\n {\n \"port\": 2.1259406,\n \"sky\": 1.7073475,\n \"color\": 1.6922266,\n \"dead\": 1.6247464,\n \"television\": 1.3525393,\n \"above\": 1.2425821,\n \"tuned\": 1.1440028,\n \"colors\": 1.1218185,\n \"tv\": 1.0111054,\n \"ports\": 1.0067928,\n \"poem\": 1.0042328,\n \"channel\": 0.99471164,\n \"tune\": 0.96235967,\n \"scene\": 0.9020516\n }\n ]\n}" + } } } } @@ -10331,6 +10373,13 @@ "required": [ "input" ] + }, + "examples": { + "TextEmbeddingRequestExample1": { + "summary": "Text embedding task", + "description": "Run `POST _inference/text_embedding/my-cohere-endpoint` to perform text embedding on the example sentence using the Cohere integration,", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\",\n \"task_settings\": {\n \"input_type\": \"ingest\"\n }\n}" + } } } } @@ -10342,6 +10391,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:TextEmbeddingInferenceResult" + }, + "examples": { + "TextEmbeddingResponseExample1": { + "summary": "Text embedding task", + "description": "An abbreviated response from `POST _inference/text_embedding/my-cohere-endpoint`.\n", + "value": "{\n \"text_embedding\": [\n {\n \"embedding\": [\n {\n 0.018569946,\n -0.036895752,\n 0.01486969,\n -0.0045204163,\n -0.04385376,\n 0.0075950623,\n 0.04260254,\n -0.004005432,\n 0.007865906,\n 0.030792236,\n -0.050476074,\n 0.011795044,\n -0.011642456,\n -0.010070801\n }\n ]\n }\n ]\n}" + } } } } diff --git a/output/schema/schema-serverless.json b/output/schema/schema-serverless.json index 2448a062f9..b17011fc86 100644 --- a/output/schema/schema-serverless.json +++ b/output/schema/schema-serverless.json @@ -27132,6 +27132,13 @@ ] }, "description": "Perform completion inference on the service", + "examples": { + "CompletionRequestExample1": { + "description": "Run `POST _inference/completion/openai_chat_completions` to perform a completion on the example question.", + "summary": "Completion task", + "value": "{\n \"input\": \"What is Elastic?\"\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -27185,6 +27192,13 @@ } } }, + "examples": { + "CompletionResponseExample1": { + "description": "A successful response from `POST _inference/completion/openai_chat_completions`.\n", + "summary": "Completion task", + "value": "{\n \"completion\": [\n {\n \"result\": \"Elastic is a company that provides a range of software solutions for search, logging, security, and analytics. Their flagship product is Elasticsearch, an open-source, distributed search engine that allows users to search, analyze, and visualize large volumes of data in real-time. 
Elastic also offers products such as Kibana, a data visualization tool, and Logstash, a log management and pipeline tool, as well as various other tools and solutions for data analysis and management.\"\n }\n ]\n}" + } + }, "kind": "response", "name": { "name": "Response", @@ -28000,6 +28014,13 @@ ] }, "description": "Perform rereanking inference on the service", + "examples": { + "RerankRequestExample1": { + "description": "Run `POST _inference/rerank/cohere_rerank` to perform reranking on the example input.", + "summary": "Rerank task", + "value": "{\n \"input\": [\"luke\", \"like\", \"leia\", \"chewy\",\"r2d2\", \"star\", \"wars\"],\n \"query\": \"star wars main character\"\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -28053,6 +28074,13 @@ } } }, + "examples": { + "RerankResponseExample1": { + "description": "A successful response from `POST _inference/rerank/cohere_rerank`.\n", + "summary": "Rerank task", + "value": "{\n \"rerank\": [\n {\n \"index\": \"2\",\n \"relevance_score\": \"0.011597361\",\n \"text\": \"leia\"\n },\n {\n \"index\": \"0\",\n \"relevance_score\": \"0.006338922\",\n \"text\": \"luke\"\n },\n {\n \"index\": \"5\",\n \"relevance_score\": \"0.0016166499\",\n \"text\": \"star\"\n },\n {\n \"index\": \"4\",\n \"relevance_score\": \"0.0011695103\",\n \"text\": \"r2d2\"\n },\n {\n \"index\": \"1\",\n \"relevance_score\": \"5.614787E-4\",\n \"text\": \"like\"\n },\n {\n \"index\": \"6\",\n \"relevance_score\": \"3.7850367E-4\",\n \"text\": \"wars\"\n },\n {\n \"index\": \"3\",\n \"relevance_score\": \"1.2508839E-5\",\n \"text\": \"chewy\"\n }\n ]\n}" + } + }, "kind": "response", "name": { "name": "Response", @@ -28109,6 +28137,13 @@ ] }, "description": "Perform sparse embedding inference on the service", + "examples": { + "SparseEmbeddingRequestExample1": { + "description": "Run `POST _inference/sparse_embedding/my-elser-model` to perform sparse embedding on the example sentence.", + "summary": "Sparse embedding task", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\"\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -28162,6 +28197,13 @@ } } }, + "examples": { + "SparseEmbeddingResponseExample1": { + "description": "An abbreviated response from `POST _inference/sparse_embedding/my-elser-model`.\n", + "summary": "Sparse embedding task", + "value": "{\n \"sparse_embedding\": [\n {\n \"port\": 2.1259406,\n \"sky\": 1.7073475,\n \"color\": 1.6922266,\n \"dead\": 1.6247464,\n \"television\": 1.3525393,\n \"above\": 1.2425821,\n \"tuned\": 1.1440028,\n \"colors\": 1.1218185,\n \"tv\": 1.0111054,\n \"ports\": 1.0067928,\n \"poem\": 1.0042328,\n \"channel\": 0.99471164,\n \"tune\": 0.96235967,\n \"scene\": 0.9020516\n }\n ]\n}" + } + }, "kind": "response", "name": { "name": "Response", @@ -28218,6 +28260,13 @@ ] }, "description": "Perform text embedding inference on the service", + "examples": { + "TextEmbeddingRequestExample1": { + "description": "Run `POST _inference/text_embedding/my-cohere-endpoint` to perform text embedding on the example sentence using the Cohere integration,", + "summary": "Text embedding task", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\",\n \"task_settings\": {\n \"input_type\": \"ingest\"\n }\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -28271,6 +28320,13 @@ } } }, + "examples": { + "TextEmbeddingResponseExample1": { + "description": "An abbreviated response from `POST 
_inference/text_embedding/my-cohere-endpoint`.\n", + "summary": "Text embedding task", + "value": "{\n \"text_embedding\": [\n {\n \"embedding\": [\n {\n 0.018569946,\n -0.036895752,\n 0.01486969,\n -0.0045204163,\n -0.04385376,\n 0.0075950623,\n 0.04260254,\n -0.004005432,\n 0.007865906,\n 0.030792236,\n -0.050476074,\n 0.011795044,\n -0.011642456,\n -0.010070801\n }\n ]\n }\n ]\n}" + } + }, "kind": "response", "name": { "name": "Response", diff --git a/output/schema/schema.json b/output/schema/schema.json index 5107d6024c..efb55bee92 100644 --- a/output/schema/schema.json +++ b/output/schema/schema.json @@ -150109,6 +150109,13 @@ ] }, "description": "Perform completion inference on the service", + "examples": { + "CompletionRequestExample1": { + "description": "Run `POST _inference/completion/openai_chat_completions` to perform a completion on the example question.", + "summary": "Completion task", + "value": "{\n \"input\": \"What is Elastic?\"\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -150162,6 +150169,13 @@ } } }, + "examples": { + "CompletionResponseExample1": { + "description": "A successful response from `POST _inference/completion/openai_chat_completions`.\n", + "summary": "Completion task", + "value": "{\n \"completion\": [\n {\n \"result\": \"Elastic is a company that provides a range of software solutions for search, logging, security, and analytics. Their flagship product is Elasticsearch, an open-source, distributed search engine that allows users to search, analyze, and visualize large volumes of data in real-time. Elastic also offers products such as Kibana, a data visualization tool, and Logstash, a log management and pipeline tool, as well as various other tools and solutions for data analysis and management.\"\n }\n ]\n}" + } + }, "name": { "name": "Response", "namespace": "inference.completion" @@ -151445,6 +151459,13 @@ ] }, "description": "Perform rereanking inference on the service", + "examples": { + "RerankRequestExample1": { + "description": "Run `POST _inference/rerank/cohere_rerank` to perform reranking on the example input.", + "summary": "Rerank task", + "value": "{\n \"input\": [\"luke\", \"like\", \"leia\", \"chewy\",\"r2d2\", \"star\", \"wars\"],\n \"query\": \"star wars main character\"\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -151498,6 +151519,13 @@ } } }, + "examples": { + "RerankResponseExample1": { + "description": "A successful response from `POST _inference/rerank/cohere_rerank`.\n", + "summary": "Rerank task", + "value": "{\n \"rerank\": [\n {\n \"index\": \"2\",\n \"relevance_score\": \"0.011597361\",\n \"text\": \"leia\"\n },\n {\n \"index\": \"0\",\n \"relevance_score\": \"0.006338922\",\n \"text\": \"luke\"\n },\n {\n \"index\": \"5\",\n \"relevance_score\": \"0.0016166499\",\n \"text\": \"star\"\n },\n {\n \"index\": \"4\",\n \"relevance_score\": \"0.0011695103\",\n \"text\": \"r2d2\"\n },\n {\n \"index\": \"1\",\n \"relevance_score\": \"5.614787E-4\",\n \"text\": \"like\"\n },\n {\n \"index\": \"6\",\n \"relevance_score\": \"3.7850367E-4\",\n \"text\": \"wars\"\n },\n {\n \"index\": \"3\",\n \"relevance_score\": \"1.2508839E-5\",\n \"text\": \"chewy\"\n }\n ]\n}" + } + }, "name": { "name": "Response", "namespace": "inference.rerank" @@ -151554,6 +151582,13 @@ ] }, "description": "Perform sparse embedding inference on the service", + "examples": { + "SparseEmbeddingRequestExample1": { + "description": "Run `POST _inference/sparse_embedding/my-elser-model` to perform sparse embedding on the example 
sentence.", + "summary": "Sparse embedding task", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\"\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -151607,6 +151642,13 @@ } } }, + "examples": { + "SparseEmbeddingResponseExample1": { + "description": "An abbreviated response from `POST _inference/sparse_embedding/my-elser-model`.\n", + "summary": "Sparse embedding task", + "value": "{\n \"sparse_embedding\": [\n {\n \"port\": 2.1259406,\n \"sky\": 1.7073475,\n \"color\": 1.6922266,\n \"dead\": 1.6247464,\n \"television\": 1.3525393,\n \"above\": 1.2425821,\n \"tuned\": 1.1440028,\n \"colors\": 1.1218185,\n \"tv\": 1.0111054,\n \"ports\": 1.0067928,\n \"poem\": 1.0042328,\n \"channel\": 0.99471164,\n \"tune\": 0.96235967,\n \"scene\": 0.9020516\n }\n ]\n}" + } + }, "name": { "name": "Response", "namespace": "inference.sparse_embedding" @@ -151765,6 +151807,13 @@ ] }, "description": "Perform text embedding inference on the service", + "examples": { + "TextEmbeddingRequestExample1": { + "description": "Run `POST _inference/text_embedding/my-cohere-endpoint` to perform text embedding on the example sentence using the Cohere integration,", + "summary": "Text embedding task", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\",\n \"task_settings\": {\n \"input_type\": \"ingest\"\n }\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -151818,6 +151867,13 @@ } } }, + "examples": { + "TextEmbeddingResponseExample1": { + "description": "An abbreviated response from `POST _inference/text_embedding/my-cohere-endpoint`.\n", + "summary": "Text embedding task", + "value": "{\n \"text_embedding\": [\n {\n \"embedding\": [\n {\n 0.018569946,\n -0.036895752,\n 0.01486969,\n -0.0045204163,\n -0.04385376,\n 0.0075950623,\n 0.04260254,\n -0.004005432,\n 0.007865906,\n 0.030792236,\n -0.050476074,\n 0.011795044,\n -0.011642456,\n -0.010070801\n }\n ]\n }\n ]\n}" + } + }, "name": { "name": "Response", "namespace": "inference.text_embedding" diff --git a/specification/inference/inference/examples/request/InferenceRequestExample1.yaml b/specification/inference/completion/examples/request/CompletionRequestExample1.yaml similarity index 100% rename from specification/inference/inference/examples/request/InferenceRequestExample1.yaml rename to specification/inference/completion/examples/request/CompletionRequestExample1.yaml diff --git a/specification/inference/inference/examples/response/InferenceResponseExample1.yaml b/specification/inference/completion/examples/response/CompletionResponseExample1.yaml similarity index 100% rename from specification/inference/inference/examples/response/InferenceResponseExample1.yaml rename to specification/inference/completion/examples/response/CompletionResponseExample1.yaml diff --git a/specification/inference/inference/examples/request/InferenceRequestExample2.yaml b/specification/inference/rerank/examples/request/RerankRequestExample1.yaml similarity index 100% rename from specification/inference/inference/examples/request/InferenceRequestExample2.yaml rename to specification/inference/rerank/examples/request/RerankRequestExample1.yaml diff --git a/specification/inference/inference/examples/response/InferenceResponseExample2.yaml b/specification/inference/rerank/examples/response/RerankResponseExample1.yaml similarity index 100% rename from specification/inference/inference/examples/response/InferenceResponseExample2.yaml rename to 
specification/inference/rerank/examples/response/RerankResponseExample1.yaml diff --git a/specification/inference/inference/examples/request/InferenceRequestExample3.yaml b/specification/inference/sparse_embedding/examples/request/SparseEmbeddingRequestExample1.yaml similarity index 100% rename from specification/inference/inference/examples/request/InferenceRequestExample3.yaml rename to specification/inference/sparse_embedding/examples/request/SparseEmbeddingRequestExample1.yaml diff --git a/specification/inference/inference/examples/response/InferenceResponseExample3.yaml b/specification/inference/sparse_embedding/examples/response/SparseEmbeddingResponseExample1.yaml similarity index 100% rename from specification/inference/inference/examples/response/InferenceResponseExample3.yaml rename to specification/inference/sparse_embedding/examples/response/SparseEmbeddingResponseExample1.yaml diff --git a/specification/inference/inference/examples/request/InferenceRequestExample4.yaml b/specification/inference/text_embedding/examples/request/TextEmbeddingRequestExample1.yaml similarity index 100% rename from specification/inference/inference/examples/request/InferenceRequestExample4.yaml rename to specification/inference/text_embedding/examples/request/TextEmbeddingRequestExample1.yaml diff --git a/specification/inference/inference/examples/response/InferenceResponseExample4.yaml b/specification/inference/text_embedding/examples/response/TextEmbeddingResponseExample1.yaml similarity index 100% rename from specification/inference/inference/examples/response/InferenceResponseExample4.yaml rename to specification/inference/text_embedding/examples/response/TextEmbeddingResponseExample1.yaml
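
The renames above settle on the layout specification/inference/<task_type>/examples/{request,response}/<TaskType>{Request,Response}Example<N>.yaml, and the generated "examples" blocks in the OpenAPI and schema outputs shown earlier in this patch are built from those YAML files. As a minimal sketch of what one relocated request file plausibly contains — the summary, description, and value text are copied from the generated output above, but the exact YAML key set used by the spec tooling is an assumption here, not confirmed against the repository:

    # specification/inference/completion/examples/request/CompletionRequestExample1.yaml
    # Illustrative sketch only; key names are assumed from the generated
    # "examples" entries above, and the real files may carry extra metadata.
    summary: Completion task
    description: >-
      Run `POST _inference/completion/openai_chat_completions` to perform a
      completion on the example question.
    value: |-
      {
        "input": "What is Elastic?"
      }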