diff --git a/output/openapi/elasticsearch-openapi.json b/output/openapi/elasticsearch-openapi.json index 6c45fd480d..cfad1484e9 100644 --- a/output/openapi/elasticsearch-openapi.json +++ b/output/openapi/elasticsearch-openapi.json @@ -17573,6 +17573,13 @@ "required": [ "input" ] + }, + "examples": { + "CompletionRequestExample1": { + "summary": "Completion task", + "description": "Run `POST _inference/completion/openai_chat_completions` to perform a completion on the example question.", + "value": "{\n \"input\": \"What is Elastic?\"\n}" + } } } } @@ -17584,6 +17591,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:CompletionInferenceResult" + }, + "examples": { + "CompletionResponseExample1": { + "summary": "Completion task", + "description": "A successful response from `POST _inference/completion/openai_chat_completions`.\n", + "value": "{\n \"completion\": [\n {\n \"result\": \"Elastic is a company that provides a range of software solutions for search, logging, security, and analytics. Their flagship product is Elasticsearch, an open-source, distributed search engine that allows users to search, analyze, and visualize large volumes of data in real-time. 
Elastic also offers products such as Kibana, a data visualization tool, and Logstash, a log management and pipeline tool, as well as various other tools and solutions for data analysis and management.\"\n }\n ]\n}" + } } } } @@ -18124,6 +18138,13 @@ "query", "input" ] + }, + "examples": { + "RerankRequestExample1": { + "summary": "Rerank task", + "description": "Run `POST _inference/rerank/cohere_rerank` to perform reranking on the example input.", + "value": "{\n \"input\": [\"luke\", \"like\", \"leia\", \"chewy\",\"r2d2\", \"star\", \"wars\"],\n \"query\": \"star wars main character\"\n}" + } } } } @@ -18135,6 +18156,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:RerankedInferenceResult" + }, + "examples": { + "RerankResponseExample1": { + "summary": "Rerank task", + "description": "A successful response from `POST _inference/rerank/cohere_rerank`.\n", + "value": "{\n \"rerank\": [\n {\n \"index\": \"2\",\n \"relevance_score\": \"0.011597361\",\n \"text\": \"leia\"\n },\n {\n \"index\": \"0\",\n \"relevance_score\": \"0.006338922\",\n \"text\": \"luke\"\n },\n {\n \"index\": \"5\",\n \"relevance_score\": \"0.0016166499\",\n \"text\": \"star\"\n },\n {\n \"index\": \"4\",\n \"relevance_score\": \"0.0011695103\",\n \"text\": \"r2d2\"\n },\n {\n \"index\": \"1\",\n \"relevance_score\": \"5.614787E-4\",\n \"text\": \"like\"\n },\n {\n \"index\": \"6\",\n \"relevance_score\": \"3.7850367E-4\",\n \"text\": \"wars\"\n },\n {\n \"index\": \"3\",\n \"relevance_score\": \"1.2508839E-5\",\n \"text\": \"chewy\"\n }\n ]\n}" + } } } } @@ -18200,6 +18228,13 @@ "required": [ "input" ] + }, + "examples": { + "SparseEmbeddingRequestExample1": { + "summary": "Sparse embedding task", + "description": "Run `POST _inference/sparse_embedding/my-elser-model` to perform sparse embedding on the example sentence.", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\"\n}" + } } } } @@ -18211,6 
+18246,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:SparseEmbeddingInferenceResult" + }, + "examples": { + "SparseEmbeddingResponseExample1": { + "summary": "Sparse embedding task", + "description": "An abbreviated response from `POST _inference/sparse_embedding/my-elser-model`.\n", + "value": "{\n \"sparse_embedding\": [\n {\n \"port\": 2.1259406,\n \"sky\": 1.7073475,\n \"color\": 1.6922266,\n \"dead\": 1.6247464,\n \"television\": 1.3525393,\n \"above\": 1.2425821,\n \"tuned\": 1.1440028,\n \"colors\": 1.1218185,\n \"tv\": 1.0111054,\n \"ports\": 1.0067928,\n \"poem\": 1.0042328,\n \"channel\": 0.99471164,\n \"tune\": 0.96235967,\n \"scene\": 0.9020516\n }\n ]\n}" + } } } } @@ -18350,6 +18392,13 @@ "required": [ "input" ] + }, + "examples": { + "TextEmbeddingRequestExample1": { + "summary": "Text embedding task", + "description": "Run `POST _inference/text_embedding/my-cohere-endpoint` to perform text embedding on the example sentence using the Cohere integration.", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\",\n \"task_settings\": {\n \"input_type\": \"ingest\"\n }\n}" + } } } } @@ -18361,6 +18410,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:TextEmbeddingInferenceResult" + }, + "examples": { + "TextEmbeddingResponseExample1": { + "summary": "Text embedding task", + "description": "An abbreviated response from `POST _inference/text_embedding/my-cohere-endpoint`.\n", + "value": "{\n \"text_embedding\": [\n {\n \"embedding\": [\n 0.018569946,\n -0.036895752,\n 0.01486969,\n -0.0045204163,\n -0.04385376,\n 0.0075950623,\n 0.04260254,\n -0.004005432,\n 0.007865906,\n 0.030792236,\n -0.050476074,\n 0.011795044,\n -0.011642456,\n -0.010070801\n ]\n }\n ]\n}" + } } } } diff --git a/output/openapi/elasticsearch-serverless-openapi.json b/output/openapi/elasticsearch-serverless-openapi.json index ef165f4ddf..909ba8972b
100644 --- a/output/openapi/elasticsearch-serverless-openapi.json +++ b/output/openapi/elasticsearch-serverless-openapi.json @@ -9395,6 +9395,13 @@ "required": [ "input" ] + }, + "examples": { + "CompletionRequestExample1": { + "summary": "Completion task", + "description": "Run `POST _inference/completion/openai_chat_completions` to perform a completion on the example question.", + "value": "{\n \"input\": \"What is Elastic?\"\n}" + } } } } @@ -9406,6 +9413,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:CompletionInferenceResult" + }, + "examples": { + "CompletionResponseExample1": { + "summary": "Completion task", + "description": "A successful response from `POST _inference/completion/openai_chat_completions`.\n", + "value": "{\n \"completion\": [\n {\n \"result\": \"Elastic is a company that provides a range of software solutions for search, logging, security, and analytics. Their flagship product is Elasticsearch, an open-source, distributed search engine that allows users to search, analyze, and visualize large volumes of data in real-time. 
Elastic also offers products such as Kibana, a data visualization tool, and Logstash, a log management and pipeline tool, as well as various other tools and solutions for data analysis and management.\"\n }\n ]\n}" + } } } } @@ -9946,6 +9960,13 @@ "query", "input" ] + }, + "examples": { + "RerankRequestExample1": { + "summary": "Rerank task", + "description": "Run `POST _inference/rerank/cohere_rerank` to perform reranking on the example input.", + "value": "{\n \"input\": [\"luke\", \"like\", \"leia\", \"chewy\",\"r2d2\", \"star\", \"wars\"],\n \"query\": \"star wars main character\"\n}" + } } } } @@ -9957,6 +9978,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:RerankedInferenceResult" + }, + "examples": { + "RerankResponseExample1": { + "summary": "Rerank task", + "description": "A successful response from `POST _inference/rerank/cohere_rerank`.\n", + "value": "{\n \"rerank\": [\n {\n \"index\": \"2\",\n \"relevance_score\": \"0.011597361\",\n \"text\": \"leia\"\n },\n {\n \"index\": \"0\",\n \"relevance_score\": \"0.006338922\",\n \"text\": \"luke\"\n },\n {\n \"index\": \"5\",\n \"relevance_score\": \"0.0016166499\",\n \"text\": \"star\"\n },\n {\n \"index\": \"4\",\n \"relevance_score\": \"0.0011695103\",\n \"text\": \"r2d2\"\n },\n {\n \"index\": \"1\",\n \"relevance_score\": \"5.614787E-4\",\n \"text\": \"like\"\n },\n {\n \"index\": \"6\",\n \"relevance_score\": \"3.7850367E-4\",\n \"text\": \"wars\"\n },\n {\n \"index\": \"3\",\n \"relevance_score\": \"1.2508839E-5\",\n \"text\": \"chewy\"\n }\n ]\n}" + } } } } @@ -10022,6 +10050,13 @@ "required": [ "input" ] + }, + "examples": { + "SparseEmbeddingRequestExample1": { + "summary": "Sparse embedding task", + "description": "Run `POST _inference/sparse_embedding/my-elser-model` to perform sparse embedding on the example sentence.", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\"\n}" + } } } } @@ -10033,6 
+10068,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:SparseEmbeddingInferenceResult" + }, + "examples": { + "SparseEmbeddingResponseExample1": { + "summary": "Sparse embedding task", + "description": "An abbreviated response from `POST _inference/sparse_embedding/my-elser-model`.\n", + "value": "{\n \"sparse_embedding\": [\n {\n \"port\": 2.1259406,\n \"sky\": 1.7073475,\n \"color\": 1.6922266,\n \"dead\": 1.6247464,\n \"television\": 1.3525393,\n \"above\": 1.2425821,\n \"tuned\": 1.1440028,\n \"colors\": 1.1218185,\n \"tv\": 1.0111054,\n \"ports\": 1.0067928,\n \"poem\": 1.0042328,\n \"channel\": 0.99471164,\n \"tune\": 0.96235967,\n \"scene\": 0.9020516\n }\n ]\n}" + } } } } @@ -10098,6 +10140,13 @@ "required": [ "input" ] + }, + "examples": { + "TextEmbeddingRequestExample1": { + "summary": "Text embedding task", + "description": "Run `POST _inference/text_embedding/my-cohere-endpoint` to perform text embedding on the example sentence using the Cohere integration.", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\",\n \"task_settings\": {\n \"input_type\": \"ingest\"\n }\n}" + } } } } @@ -10109,6 +10158,13 @@ "application/json": { "schema": { "$ref": "#/components/schemas/inference._types:TextEmbeddingInferenceResult" + }, + "examples": { + "TextEmbeddingResponseExample1": { + "summary": "Text embedding task", + "description": "An abbreviated response from `POST _inference/text_embedding/my-cohere-endpoint`.\n", + "value": "{\n \"text_embedding\": [\n {\n \"embedding\": [\n 0.018569946,\n -0.036895752,\n 0.01486969,\n -0.0045204163,\n -0.04385376,\n 0.0075950623,\n 0.04260254,\n -0.004005432,\n 0.007865906,\n 0.030792236,\n -0.050476074,\n 0.011795044,\n -0.011642456,\n -0.010070801\n ]\n }\n ]\n}" + } } } } diff --git a/output/schema/schema-serverless.json b/output/schema/schema-serverless.json index aa66f92b23..1f36cd49c2 100644 ---
a/output/schema/schema-serverless.json +++ b/output/schema/schema-serverless.json @@ -26708,6 +26708,13 @@ ] }, "description": "Perform completion inference on the service", + "examples": { + "CompletionRequestExample1": { + "description": "Run `POST _inference/completion/openai_chat_completions` to perform a completion on the example question.", + "summary": "Completion task", + "value": "{\n \"input\": \"What is Elastic?\"\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -26761,6 +26768,13 @@ } } }, + "examples": { + "CompletionResponseExample1": { + "description": "A successful response from `POST _inference/completion/openai_chat_completions`.\n", + "summary": "Completion task", + "value": "{\n \"completion\": [\n {\n \"result\": \"Elastic is a company that provides a range of software solutions for search, logging, security, and analytics. Their flagship product is Elasticsearch, an open-source, distributed search engine that allows users to search, analyze, and visualize large volumes of data in real-time. 
Elastic also offers products such as Kibana, a data visualization tool, and Logstash, a log management and pipeline tool, as well as various other tools and solutions for data analysis and management.\"\n }\n ]\n}" + } + }, "kind": "response", "name": { "name": "Response", @@ -27521,6 +27535,13 @@ ] }, "description": "Perform rereanking inference on the service", + "examples": { + "RerankRequestExample1": { + "description": "Run `POST _inference/rerank/cohere_rerank` to perform reranking on the example input.", + "summary": "Rerank task", + "value": "{\n \"input\": [\"luke\", \"like\", \"leia\", \"chewy\",\"r2d2\", \"star\", \"wars\"],\n \"query\": \"star wars main character\"\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -27574,6 +27595,13 @@ } } }, + "examples": { + "RerankResponseExample1": { + "description": "A successful response from `POST _inference/rerank/cohere_rerank`.\n", + "summary": "Rerank task", + "value": "{\n \"rerank\": [\n {\n \"index\": \"2\",\n \"relevance_score\": \"0.011597361\",\n \"text\": \"leia\"\n },\n {\n \"index\": \"0\",\n \"relevance_score\": \"0.006338922\",\n \"text\": \"luke\"\n },\n {\n \"index\": \"5\",\n \"relevance_score\": \"0.0016166499\",\n \"text\": \"star\"\n },\n {\n \"index\": \"4\",\n \"relevance_score\": \"0.0011695103\",\n \"text\": \"r2d2\"\n },\n {\n \"index\": \"1\",\n \"relevance_score\": \"5.614787E-4\",\n \"text\": \"like\"\n },\n {\n \"index\": \"6\",\n \"relevance_score\": \"3.7850367E-4\",\n \"text\": \"wars\"\n },\n {\n \"index\": \"3\",\n \"relevance_score\": \"1.2508839E-5\",\n \"text\": \"chewy\"\n }\n ]\n}" + } + }, "kind": "response", "name": { "name": "Response", @@ -27630,6 +27658,13 @@ ] }, "description": "Perform sparse embedding inference on the service", + "examples": { + "SparseEmbeddingRequestExample1": { + "description": "Run `POST _inference/sparse_embedding/my-elser-model` to perform sparse embedding on the example sentence.", + "summary": "Sparse embedding task", + "value": 
"{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\"\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -27683,6 +27718,13 @@ } } }, + "examples": { + "SparseEmbeddingResponseExample1": { + "description": "An abbreviated response from `POST _inference/sparse_embedding/my-elser-model`.\n", + "summary": "Sparse embedding task", + "value": "{\n \"sparse_embedding\": [\n {\n \"port\": 2.1259406,\n \"sky\": 1.7073475,\n \"color\": 1.6922266,\n \"dead\": 1.6247464,\n \"television\": 1.3525393,\n \"above\": 1.2425821,\n \"tuned\": 1.1440028,\n \"colors\": 1.1218185,\n \"tv\": 1.0111054,\n \"ports\": 1.0067928,\n \"poem\": 1.0042328,\n \"channel\": 0.99471164,\n \"tune\": 0.96235967,\n \"scene\": 0.9020516\n }\n ]\n}" + } + }, "kind": "response", "name": { "name": "Response", @@ -27739,6 +27781,13 @@ ] }, "description": "Perform text embedding inference on the service", + "examples": { + "TextEmbeddingRequestExample1": { + "description": "Run `POST _inference/text_embedding/my-cohere-endpoint` to perform text embedding on the example sentence using the Cohere integration.", + "summary": "Text embedding task", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\",\n \"task_settings\": {\n \"input_type\": \"ingest\"\n }\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -27792,6 +27841,13 @@ } } }, + "examples": { + "TextEmbeddingResponseExample1": { + "description": "An abbreviated response from `POST _inference/text_embedding/my-cohere-endpoint`.\n", + "summary": "Text embedding task", + "value": "{\n \"text_embedding\": [\n {\n \"embedding\": [\n 0.018569946,\n -0.036895752,\n 0.01486969,\n -0.0045204163,\n -0.04385376,\n 0.0075950623,\n 0.04260254,\n -0.004005432,\n 0.007865906,\n 0.030792236,\n -0.050476074,\n 0.011795044,\n -0.011642456,\n -0.010070801\n ]\n }\n ]\n}" + } + }, "kind": "response", "name": { "name": "Response", diff --git
a/output/schema/schema.json b/output/schema/schema.json index 8671596959..568c367605 100644 --- a/output/schema/schema.json +++ b/output/schema/schema.json @@ -150029,6 +150029,13 @@ ] }, "description": "Perform completion inference on the service", + "examples": { + "CompletionRequestExample1": { + "description": "Run `POST _inference/completion/openai_chat_completions` to perform a completion on the example question.", + "summary": "Completion task", + "value": "{\n \"input\": \"What is Elastic?\"\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -150082,6 +150089,13 @@ } } }, + "examples": { + "CompletionResponseExample1": { + "description": "A successful response from `POST _inference/completion/openai_chat_completions`.\n", + "summary": "Completion task", + "value": "{\n \"completion\": [\n {\n \"result\": \"Elastic is a company that provides a range of software solutions for search, logging, security, and analytics. Their flagship product is Elasticsearch, an open-source, distributed search engine that allows users to search, analyze, and visualize large volumes of data in real-time. 
Elastic also offers products such as Kibana, a data visualization tool, and Logstash, a log management and pipeline tool, as well as various other tools and solutions for data analysis and management.\"\n }\n ]\n}" + } + }, "name": { "name": "Response", "namespace": "inference.completion" @@ -151310,6 +151324,13 @@ ] }, "description": "Perform rereanking inference on the service", + "examples": { + "RerankRequestExample1": { + "description": "Run `POST _inference/rerank/cohere_rerank` to perform reranking on the example input.", + "summary": "Rerank task", + "value": "{\n \"input\": [\"luke\", \"like\", \"leia\", \"chewy\",\"r2d2\", \"star\", \"wars\"],\n \"query\": \"star wars main character\"\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -151363,6 +151384,13 @@ } } }, + "examples": { + "RerankResponseExample1": { + "description": "A successful response from `POST _inference/rerank/cohere_rerank`.\n", + "summary": "Rerank task", + "value": "{\n \"rerank\": [\n {\n \"index\": \"2\",\n \"relevance_score\": \"0.011597361\",\n \"text\": \"leia\"\n },\n {\n \"index\": \"0\",\n \"relevance_score\": \"0.006338922\",\n \"text\": \"luke\"\n },\n {\n \"index\": \"5\",\n \"relevance_score\": \"0.0016166499\",\n \"text\": \"star\"\n },\n {\n \"index\": \"4\",\n \"relevance_score\": \"0.0011695103\",\n \"text\": \"r2d2\"\n },\n {\n \"index\": \"1\",\n \"relevance_score\": \"5.614787E-4\",\n \"text\": \"like\"\n },\n {\n \"index\": \"6\",\n \"relevance_score\": \"3.7850367E-4\",\n \"text\": \"wars\"\n },\n {\n \"index\": \"3\",\n \"relevance_score\": \"1.2508839E-5\",\n \"text\": \"chewy\"\n }\n ]\n}" + } + }, "name": { "name": "Response", "namespace": "inference.rerank" @@ -151419,6 +151447,13 @@ ] }, "description": "Perform sparse embedding inference on the service", + "examples": { + "SparseEmbeddingRequestExample1": { + "description": "Run `POST _inference/sparse_embedding/my-elser-model` to perform sparse embedding on the example sentence.", + "summary": 
"Sparse embedding task", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\"\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -151472,6 +151507,13 @@ } } }, + "examples": { + "SparseEmbeddingResponseExample1": { + "description": "An abbreviated response from `POST _inference/sparse_embedding/my-elser-model`.\n", + "summary": "Sparse embedding task", + "value": "{\n \"sparse_embedding\": [\n {\n \"port\": 2.1259406,\n \"sky\": 1.7073475,\n \"color\": 1.6922266,\n \"dead\": 1.6247464,\n \"television\": 1.3525393,\n \"above\": 1.2425821,\n \"tuned\": 1.1440028,\n \"colors\": 1.1218185,\n \"tv\": 1.0111054,\n \"ports\": 1.0067928,\n \"poem\": 1.0042328,\n \"channel\": 0.99471164,\n \"tune\": 0.96235967,\n \"scene\": 0.9020516\n }\n ]\n}" + } + }, "name": { "name": "Response", "namespace": "inference.sparse_embedding" @@ -151630,6 +151672,13 @@ ] }, "description": "Perform text embedding inference on the service", + "examples": { + "TextEmbeddingRequestExample1": { + "description": "Run `POST _inference/text_embedding/my-cohere-endpoint` to perform text embedding on the example sentence using the Cohere integration.", + "summary": "Text embedding task", + "value": "{\n \"input\": \"The sky above the port was the color of television tuned to a dead channel.\",\n \"task_settings\": {\n \"input_type\": \"ingest\"\n }\n}" + } + }, "inherits": { "type": { "name": "RequestBase", @@ -151683,6 +151732,13 @@ } } }, + "examples": { + "TextEmbeddingResponseExample1": { + "description": "An abbreviated response from `POST _inference/text_embedding/my-cohere-endpoint`.\n", + "summary": "Text embedding task", + "value": "{\n \"text_embedding\": [\n {\n \"embedding\": [\n 0.018569946,\n -0.036895752,\n 0.01486969,\n -0.0045204163,\n -0.04385376,\n 0.0075950623,\n 0.04260254,\n -0.004005432,\n 0.007865906,\n 0.030792236,\n -0.050476074,\n 0.011795044,\n -0.011642456,\n -0.010070801\n ]\n }\n ]\n}" + } + },
"name": { "name": "Response", "namespace": "inference.text_embedding" diff --git a/specification/inference/inference/examples/request/InferenceRequestExample1.yaml b/specification/inference/completion/examples/request/CompletionRequestExample1.yaml similarity index 100% rename from specification/inference/inference/examples/request/InferenceRequestExample1.yaml rename to specification/inference/completion/examples/request/CompletionRequestExample1.yaml diff --git a/specification/inference/inference/examples/response/InferenceResponseExample1.yaml b/specification/inference/completion/examples/response/CompletionResponseExample1.yaml similarity index 100% rename from specification/inference/inference/examples/response/InferenceResponseExample1.yaml rename to specification/inference/completion/examples/response/CompletionResponseExample1.yaml diff --git a/specification/inference/inference/examples/request/InferenceRequestExample2.yaml b/specification/inference/rerank/examples/request/RerankRequestExample1.yaml similarity index 100% rename from specification/inference/inference/examples/request/InferenceRequestExample2.yaml rename to specification/inference/rerank/examples/request/RerankRequestExample1.yaml diff --git a/specification/inference/inference/examples/response/InferenceResponseExample2.yaml b/specification/inference/rerank/examples/response/RerankResponseExample1.yaml similarity index 100% rename from specification/inference/inference/examples/response/InferenceResponseExample2.yaml rename to specification/inference/rerank/examples/response/RerankResponseExample1.yaml diff --git a/specification/inference/inference/examples/request/InferenceRequestExample3.yaml b/specification/inference/sparse_embedding/examples/request/SparseEmbeddingRequestExample1.yaml similarity index 100% rename from specification/inference/inference/examples/request/InferenceRequestExample3.yaml rename to 
specification/inference/sparse_embedding/examples/request/SparseEmbeddingRequestExample1.yaml diff --git a/specification/inference/inference/examples/response/InferenceResponseExample3.yaml b/specification/inference/sparse_embedding/examples/response/SparseEmbeddingResponseExample1.yaml similarity index 100% rename from specification/inference/inference/examples/response/InferenceResponseExample3.yaml rename to specification/inference/sparse_embedding/examples/response/SparseEmbeddingResponseExample1.yaml diff --git a/specification/inference/inference/examples/request/InferenceRequestExample4.yaml b/specification/inference/text_embedding/examples/request/TextEmbeddingRequestExample1.yaml similarity index 100% rename from specification/inference/inference/examples/request/InferenceRequestExample4.yaml rename to specification/inference/text_embedding/examples/request/TextEmbeddingRequestExample1.yaml diff --git a/specification/inference/inference/examples/response/InferenceResponseExample4.yaml b/specification/inference/text_embedding/examples/response/TextEmbeddingResponseExample1.yaml similarity index 100% rename from specification/inference/inference/examples/response/InferenceResponseExample4.yaml rename to specification/inference/text_embedding/examples/response/TextEmbeddingResponseExample1.yaml