diff --git a/output/openapi/elasticsearch-openapi.json b/output/openapi/elasticsearch-openapi.json
index 54bb12606e..aaaf27475e 100644
--- a/output/openapi/elasticsearch-openapi.json
+++ b/output/openapi/elasticsearch-openapi.json
@@ -73891,17 +73891,11 @@
             "type": "number"
           },
           "token_chars": {
-            "oneOf": [
-              {
-                "type": "string"
-              },
-              {
-                "type": "array",
-                "items": {
-                  "$ref": "#/components/schemas/_types.analysis.TokenChar"
-                }
-              }
-            ]
+            "default": [],
+            "type": "array",
+            "items": {
+              "$ref": "#/components/schemas/_types.analysis.TokenChar"
+            }
           }
         },
         "required": [
diff --git a/output/openapi/elasticsearch-serverless-openapi.json b/output/openapi/elasticsearch-serverless-openapi.json
index 99fa28b057..74db807d72 100644
--- a/output/openapi/elasticsearch-serverless-openapi.json
+++ b/output/openapi/elasticsearch-serverless-openapi.json
@@ -48100,17 +48100,11 @@
             "type": "number"
           },
           "token_chars": {
-            "oneOf": [
-              {
-                "type": "string"
-              },
-              {
-                "type": "array",
-                "items": {
-                  "$ref": "#/components/schemas/_types.analysis.TokenChar"
-                }
-              }
-            ]
+            "default": [],
+            "type": "array",
+            "items": {
+              "$ref": "#/components/schemas/_types.analysis.TokenChar"
+            }
           }
         },
         "required": [
diff --git a/output/schema/schema.json b/output/schema/schema.json
index aa588d2c31..cf029e4078 100644
--- a/output/schema/schema.json
+++ b/output/schema/schema.json
@@ -74545,33 +74545,23 @@
           }
         },
         {
+          "esQuirk": "A comma-separated string is also accepted but the enum array is enough",
           "name": "token_chars",
           "required": false,
+          "serverDefault": [],
           "type": {
-            "kind": "union_of",
-            "items": [
-              {
-                "kind": "instance_of",
-                "type": {
-                  "name": "string",
-                  "namespace": "_builtins"
-                }
-              },
-              {
-                "kind": "array_of",
-                "value": {
-                  "kind": "instance_of",
-                  "type": {
-                    "name": "TokenChar",
-                    "namespace": "_types.analysis"
-                  }
-                }
+            "kind": "array_of",
+            "value": {
+              "kind": "instance_of",
+              "type": {
+                "name": "TokenChar",
+                "namespace": "_types.analysis"
              }
-            ]
+            }
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L48-L54"
+      "specLocation": "_types/analysis/tokenizers.ts#L48-L58"
    },
    {
      "kind": "interface",
@@ -76609,7 +76599,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L65-L71"
+      "specLocation": "_types/analysis/tokenizers.ts#L69-L75"
    },
    {
      "kind": "interface",
@@ -77052,7 +77042,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L73-L75"
+      "specLocation": "_types/analysis/tokenizers.ts#L77-L79"
    },
    {
      "kind": "interface",
@@ -77260,7 +77250,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L77-L79"
+      "specLocation": "_types/analysis/tokenizers.ts#L81-L83"
    },
    {
      "kind": "interface",
@@ -77567,6 +77557,7 @@
          }
        },
        {
+          "esQuirk": "A comma-separated string is also accepted but the enum array is enough",
          "name": "token_chars",
          "required": false,
          "serverDefault": [],
@@ -77582,7 +77573,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L81-L90"
+      "specLocation": "_types/analysis/tokenizers.ts#L85-L95"
    },
    {
      "kind": "interface",
@@ -77974,7 +77965,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L92-L99"
+      "specLocation": "_types/analysis/tokenizers.ts#L97-L104"
    },
    {
      "kind": "interface",
@@ -78315,7 +78306,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L101-L106"
+      "specLocation": "_types/analysis/tokenizers.ts#L106-L111"
    },
    {
      "kind": "interface",
@@ -79237,7 +79228,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L113-L116"
+      "specLocation": "_types/analysis/tokenizers.ts#L118-L121"
    },
    {
      "kind": "interface",
@@ -79272,7 +79263,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L108-L111"
+      "specLocation": "_types/analysis/tokenizers.ts#L113-L116"
    },
    {
      "kind": "interface",
@@ -79677,7 +79668,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L118-L121"
+      "specLocation": "_types/analysis/tokenizers.ts#L123-L126"
    },
    {
      "kind": "interface",
@@ -80349,7 +80340,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L123-L125"
+      "specLocation": "_types/analysis/tokenizers.ts#L128-L130"
    },
    {
      "kind": "enum",
@@ -80377,7 +80368,7 @@
        "name": "TokenChar",
        "namespace": "_types.analysis"
      },
-      "specLocation": "_types/analysis/tokenizers.ts#L56-L63"
+      "specLocation": "_types/analysis/tokenizers.ts#L60-L67"
    },
    {
      "kind": "type_alias",
@@ -80981,7 +80972,7 @@
        "name": "Tokenizer",
        "namespace": "_types.analysis"
      },
-      "specLocation": "_types/analysis/tokenizers.ts#L137-L142",
+      "specLocation": "_types/analysis/tokenizers.ts#L142-L147",
      "type": {
        "kind": "union_of",
        "items": [
@@ -81029,7 +81020,7 @@
        "name": "TokenizerDefinition",
        "namespace": "_types.analysis"
      },
-      "specLocation": "_types/analysis/tokenizers.ts#L144-L167",
+      "specLocation": "_types/analysis/tokenizers.ts#L149-L172",
      "type": {
        "kind": "union_of",
        "items": [
@@ -81314,7 +81305,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L127-L130"
+      "specLocation": "_types/analysis/tokenizers.ts#L132-L135"
    },
    {
      "kind": "interface",
@@ -81442,7 +81433,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L132-L135"
+      "specLocation": "_types/analysis/tokenizers.ts#L137-L140"
    },
    {
      "kind": "interface",
diff --git a/output/typescript/types.ts b/output/typescript/types.ts
index 724a36b60a..5e97c6dc41 100644
--- a/output/typescript/types.ts
+++ b/output/typescript/types.ts
@@ -4744,7 +4744,7 @@ export interface AnalysisEdgeNGramTokenizer extends AnalysisTokenizerBase {
   custom_token_chars?: string
   max_gram?: integer
   min_gram?: integer
-  token_chars?: string | AnalysisTokenChar[]
+  token_chars?: AnalysisTokenChar[]
 }
 
 export interface AnalysisElisionTokenFilter extends AnalysisTokenFilterBase {
diff --git a/specification/_types/analysis/tokenizers.ts b/specification/_types/analysis/tokenizers.ts
index 649b555eea..591e5c357f 100644
--- a/specification/_types/analysis/tokenizers.ts
+++ b/specification/_types/analysis/tokenizers.ts
@@ -50,7 +50,11 @@ export class EdgeNGramTokenizer extends TokenizerBase {
   custom_token_chars?: string
   max_gram?: integer
   min_gram?: integer
-  token_chars?: string | TokenChar[]
+  /**
+   * @es_quirk A comma-separated string is also accepted but the enum array is enough
+   * @server_default []
+   */
+  token_chars?: TokenChar[]
 }
 
 export enum TokenChar {
@@ -84,6 +88,7 @@ export class NGramTokenizer extends TokenizerBase {
   max_gram?: integer
   min_gram?: integer
   /**
+   * @es_quirk A comma-separated string is also accepted but the enum array is enough
    * @server_default []
    */
   token_chars?: TokenChar[]