diff --git a/output/openapi/elasticsearch-openapi.json b/output/openapi/elasticsearch-openapi.json
index 2979491a9e..3d940cdf84 100644
--- a/output/openapi/elasticsearch-openapi.json
+++ b/output/openapi/elasticsearch-openapi.json
@@ -72775,17 +72775,11 @@
                 "type": "number"
               },
               "token_chars": {
-                "oneOf": [
-                  {
-                    "type": "string"
-                  },
-                  {
-                    "type": "array",
-                    "items": {
-                      "$ref": "#/components/schemas/_types.analysis.TokenChar"
-                    }
-                  }
-                ]
+                "default": [],
+                "type": "array",
+                "items": {
+                  "$ref": "#/components/schemas/_types.analysis.TokenChar"
+                }
               }
             },
             "required": [
diff --git a/output/openapi/elasticsearch-serverless-openapi.json b/output/openapi/elasticsearch-serverless-openapi.json
index 5b0fc2640a..d27fba0fcd 100644
--- a/output/openapi/elasticsearch-serverless-openapi.json
+++ b/output/openapi/elasticsearch-serverless-openapi.json
@@ -47230,17 +47230,11 @@
                 "type": "number"
               },
               "token_chars": {
-                "oneOf": [
-                  {
-                    "type": "string"
-                  },
-                  {
-                    "type": "array",
-                    "items": {
-                      "$ref": "#/components/schemas/_types.analysis.TokenChar"
-                    }
-                  }
-                ]
+                "default": [],
+                "type": "array",
+                "items": {
+                  "$ref": "#/components/schemas/_types.analysis.TokenChar"
+                }
               }
             },
             "required": [
diff --git a/output/schema/schema.json b/output/schema/schema.json
index 45f989b1f9..142db7795b 100644
--- a/output/schema/schema.json
+++ b/output/schema/schema.json
@@ -73824,33 +73824,23 @@
           }
         },
         {
+          "esQuirk": "A comma-separated string is also accepted but the enum array is enough",
           "name": "token_chars",
           "required": false,
+          "serverDefault": [],
           "type": {
-            "kind": "union_of",
-            "items": [
-              {
-                "kind": "instance_of",
-                "type": {
-                  "name": "string",
-                  "namespace": "_builtins"
-                }
-              },
-              {
-                "kind": "array_of",
-                "value": {
-                  "kind": "instance_of",
-                  "type": {
-                    "name": "TokenChar",
-                    "namespace": "_types.analysis"
-                  }
-                }
+            "kind": "array_of",
+            "value": {
+              "kind": "instance_of",
+              "type": {
+                "name": "TokenChar",
+                "namespace": "_types.analysis"
              }
-            ]
+            }
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L48-L54"
+      "specLocation": "_types/analysis/tokenizers.ts#L48-L58"
    },
    {
      "kind": "interface",
@@ -75888,7 +75878,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L65-L71"
+      "specLocation": "_types/analysis/tokenizers.ts#L69-L75"
    },
    {
      "kind": "interface",
@@ -76331,7 +76321,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L73-L75"
+      "specLocation": "_types/analysis/tokenizers.ts#L77-L79"
    },
    {
      "kind": "interface",
@@ -76539,7 +76529,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L77-L79"
+      "specLocation": "_types/analysis/tokenizers.ts#L81-L83"
    },
    {
      "kind": "interface",
@@ -76846,6 +76836,7 @@
          }
        },
        {
+          "esQuirk": "A comma-separated string is also accepted but the enum array is enough",
          "name": "token_chars",
          "required": false,
          "serverDefault": [],
@@ -76861,7 +76852,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L81-L90"
+      "specLocation": "_types/analysis/tokenizers.ts#L85-L95"
    },
    {
      "kind": "interface",
@@ -77253,7 +77244,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L92-L99"
+      "specLocation": "_types/analysis/tokenizers.ts#L97-L104"
    },
    {
      "kind": "interface",
@@ -77594,7 +77585,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L101-L106"
+      "specLocation": "_types/analysis/tokenizers.ts#L106-L111"
    },
    {
      "kind": "interface",
@@ -78516,7 +78507,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L113-L116"
+      "specLocation": "_types/analysis/tokenizers.ts#L118-L121"
    },
    {
      "kind": "interface",
@@ -78551,7 +78542,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L108-L111"
+      "specLocation": "_types/analysis/tokenizers.ts#L113-L116"
    },
    {
      "kind": "interface",
@@ -78956,7 +78947,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L118-L121"
+      "specLocation": "_types/analysis/tokenizers.ts#L123-L126"
    },
    {
      "kind": "interface",
@@ -79628,7 +79619,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L123-L125"
+      "specLocation": "_types/analysis/tokenizers.ts#L128-L130"
    },
    {
      "kind": "enum",
@@ -79656,7 +79647,7 @@
        "name": "TokenChar",
        "namespace": "_types.analysis"
      },
-      "specLocation": "_types/analysis/tokenizers.ts#L56-L63"
+      "specLocation": "_types/analysis/tokenizers.ts#L60-L67"
    },
    {
      "kind": "type_alias",
@@ -80260,7 +80251,7 @@
        "name": "Tokenizer",
        "namespace": "_types.analysis"
      },
-      "specLocation": "_types/analysis/tokenizers.ts#L137-L142",
+      "specLocation": "_types/analysis/tokenizers.ts#L142-L147",
      "type": {
        "kind": "union_of",
        "items": [
@@ -80308,7 +80299,7 @@
        "name": "TokenizerDefinition",
        "namespace": "_types.analysis"
      },
-      "specLocation": "_types/analysis/tokenizers.ts#L144-L167",
+      "specLocation": "_types/analysis/tokenizers.ts#L149-L172",
      "type": {
        "kind": "union_of",
        "items": [
@@ -80593,7 +80584,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L127-L130"
+      "specLocation": "_types/analysis/tokenizers.ts#L132-L135"
    },
    {
      "kind": "interface",
@@ -80721,7 +80712,7 @@
          }
        }
      ],
-      "specLocation": "_types/analysis/tokenizers.ts#L132-L135"
+      "specLocation": "_types/analysis/tokenizers.ts#L137-L140"
    },
    {
      "kind": "interface",
diff --git a/output/typescript/types.ts b/output/typescript/types.ts
index 1f3424af38..5fd7a9e501 100644
--- a/output/typescript/types.ts
+++ b/output/typescript/types.ts
@@ -4724,7 +4724,7 @@ export interface AnalysisEdgeNGramTokenizer extends AnalysisTokenizerBase {
   custom_token_chars?: string
   max_gram?: integer
   min_gram?: integer
-  token_chars?: string | AnalysisTokenChar[]
+  token_chars?: AnalysisTokenChar[]
 }

 export interface AnalysisElisionTokenFilter extends AnalysisTokenFilterBase {
diff --git a/specification/_types/analysis/tokenizers.ts b/specification/_types/analysis/tokenizers.ts
index 649b555eea..591e5c357f 100644
--- a/specification/_types/analysis/tokenizers.ts
+++ b/specification/_types/analysis/tokenizers.ts
@@ -50,7 +50,11 @@ export class EdgeNGramTokenizer extends TokenizerBase {
   custom_token_chars?: string
   max_gram?: integer
   min_gram?: integer
-  token_chars?: string | TokenChar[]
+  /**
+   * @es_quirk A comma-separated string is also accepted but the enum array is enough
+   * @server_default []
+   */
+  token_chars?: TokenChar[]
 }

 export enum TokenChar {
@@ -84,6 +88,7 @@ export class NGramTokenizer extends TokenizerBase {
   max_gram?: integer
   min_gram?: integer
   /**
+   * @es_quirk A comma-separated string is also accepted but the enum array is enough
    * @server_default []
    */
   token_chars?: TokenChar[]