Commit 36b32ae

[DOCS] Sort analyzers, tokenizers, and token filters alphabetically (#48068)

1 parent: 66f3455

File tree: 5 files changed, +67 -73 lines

docs/reference/analysis/analyzers.asciidoc — 8 additions & 9 deletions

@@ -56,21 +56,20 @@ If you do not find an analyzer suitable for your needs, you can create a
 
 include::analyzers/configuring.asciidoc[]
 
-include::analyzers/standard-analyzer.asciidoc[]
+include::analyzers/fingerprint-analyzer.asciidoc[]
 
-include::analyzers/simple-analyzer.asciidoc[]
+include::analyzers/keyword-analyzer.asciidoc[]
 
-include::analyzers/whitespace-analyzer.asciidoc[]
+include::analyzers/lang-analyzer.asciidoc[]
 
-include::analyzers/stop-analyzer.asciidoc[]
+include::analyzers/pattern-analyzer.asciidoc[]
 
-include::analyzers/keyword-analyzer.asciidoc[]
+include::analyzers/simple-analyzer.asciidoc[]
 
-include::analyzers/pattern-analyzer.asciidoc[]
+include::analyzers/standard-analyzer.asciidoc[]
 
-include::analyzers/lang-analyzer.asciidoc[]
+include::analyzers/stop-analyzer.asciidoc[]
 
-include::analyzers/fingerprint-analyzer.asciidoc[]
+include::analyzers/whitespace-analyzer.asciidoc[]
 
 include::analyzers/custom-analyzer.asciidoc[]
-
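The hunk's lead line refers to creating a custom analyzer when none of the built-ins fit. For reference, a minimal sketch of such a definition, assuming an illustrative index name `my-index` and analyzer name `my_custom_analyzer` (neither appears in this commit):

[source,console]
----
PUT /my-index
{
  "settings": {
    "analysis": {
      "analyzer": {
        "my_custom_analyzer": {
          "type": "custom",
          "tokenizer": "standard",
          "filter": [ "lowercase", "asciifolding" ]
        }
      }
    }
  }
}
----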

docs/reference/analysis/tokenfilters.asciidoc — 47 additions & 46 deletions

@@ -6,99 +6,100 @@ Token filters accept a stream of tokens from a
 (eg lowercasing), delete tokens (eg remove stopwords)
 or add tokens (eg synonyms).
 
-Elasticsearch has a number of built in token filters which can be
-used to build <<analysis-custom-analyzer,custom analyzers>>.
+{es} has a number of built-in token filters you can use
+to build <<analysis-custom-analyzer,custom analyzers>>.
 
-include::tokenfilters/asciifolding-tokenfilter.asciidoc[]
 
-include::tokenfilters/flatten-graph-tokenfilter.asciidoc[]
+include::tokenfilters/apostrophe-tokenfilter.asciidoc[]
 
-include::tokenfilters/length-tokenfilter.asciidoc[]
+include::tokenfilters/asciifolding-tokenfilter.asciidoc[]
 
-include::tokenfilters/lowercase-tokenfilter.asciidoc[]
+include::tokenfilters/cjk-bigram-tokenfilter.asciidoc[]
 
-include::tokenfilters/uppercase-tokenfilter.asciidoc[]
+include::tokenfilters/cjk-width-tokenfilter.asciidoc[]
 
-include::tokenfilters/ngram-tokenfilter.asciidoc[]
+include::tokenfilters/classic-tokenfilter.asciidoc[]
 
-include::tokenfilters/edgengram-tokenfilter.asciidoc[]
+include::tokenfilters/common-grams-tokenfilter.asciidoc[]
 
-include::tokenfilters/porterstem-tokenfilter.asciidoc[]
+include::tokenfilters/compound-word-tokenfilter.asciidoc[]
 
-include::tokenfilters/shingle-tokenfilter.asciidoc[]
+include::tokenfilters/condition-tokenfilter.asciidoc[]
 
-include::tokenfilters/stop-tokenfilter.asciidoc[]
+include::tokenfilters/decimal-digit-tokenfilter.asciidoc[]
 
-include::tokenfilters/word-delimiter-tokenfilter.asciidoc[]
+include::tokenfilters/delimited-payload-tokenfilter.asciidoc[]
 
-include::tokenfilters/word-delimiter-graph-tokenfilter.asciidoc[]
+include::tokenfilters/edgengram-tokenfilter.asciidoc[]
 
-include::tokenfilters/multiplexer-tokenfilter.asciidoc[]
+include::tokenfilters/elision-tokenfilter.asciidoc[]
 
-include::tokenfilters/condition-tokenfilter.asciidoc[]
+include::tokenfilters/fingerprint-tokenfilter.asciidoc[]
 
-include::tokenfilters/predicate-tokenfilter.asciidoc[]
+include::tokenfilters/flatten-graph-tokenfilter.asciidoc[]
 
-include::tokenfilters/stemmer-tokenfilter.asciidoc[]
+include::tokenfilters/hunspell-tokenfilter.asciidoc[]
 
-include::tokenfilters/stemmer-override-tokenfilter.asciidoc[]
+include::tokenfilters/keep-types-tokenfilter.asciidoc[]
+
+include::tokenfilters/keep-words-tokenfilter.asciidoc[]
 
 include::tokenfilters/keyword-marker-tokenfilter.asciidoc[]
 
 include::tokenfilters/keyword-repeat-tokenfilter.asciidoc[]
 
 include::tokenfilters/kstem-tokenfilter.asciidoc[]
 
-include::tokenfilters/snowball-tokenfilter.asciidoc[]
-
-include::tokenfilters/phonetic-tokenfilter.asciidoc[]
-
-include::tokenfilters/synonym-tokenfilter.asciidoc[]
+include::tokenfilters/length-tokenfilter.asciidoc[]
 
-include::tokenfilters/synonym-graph-tokenfilter.asciidoc[]
+include::tokenfilters/limit-token-count-tokenfilter.asciidoc[]
 
-include::tokenfilters/compound-word-tokenfilter.asciidoc[]
+include::tokenfilters/lowercase-tokenfilter.asciidoc[]
 
-include::tokenfilters/reverse-tokenfilter.asciidoc[]
+include::tokenfilters/minhash-tokenfilter.asciidoc[]
 
-include::tokenfilters/elision-tokenfilter.asciidoc[]
+include::tokenfilters/multiplexer-tokenfilter.asciidoc[]
 
-include::tokenfilters/truncate-tokenfilter.asciidoc[]
+include::tokenfilters/ngram-tokenfilter.asciidoc[]
 
-include::tokenfilters/unique-tokenfilter.asciidoc[]
+include::tokenfilters/normalization-tokenfilter.asciidoc[]
 
 include::tokenfilters/pattern-capture-tokenfilter.asciidoc[]
 
 include::tokenfilters/pattern_replace-tokenfilter.asciidoc[]
 
-include::tokenfilters/trim-tokenfilter.asciidoc[]
+include::tokenfilters/phonetic-tokenfilter.asciidoc[]
 
-include::tokenfilters/limit-token-count-tokenfilter.asciidoc[]
+include::tokenfilters/porterstem-tokenfilter.asciidoc[]
 
-include::tokenfilters/hunspell-tokenfilter.asciidoc[]
+include::tokenfilters/predicate-tokenfilter.asciidoc[]
 
-include::tokenfilters/common-grams-tokenfilter.asciidoc[]
+include::tokenfilters/remove-duplicates-tokenfilter.asciidoc[]
 
-include::tokenfilters/normalization-tokenfilter.asciidoc[]
+include::tokenfilters/reverse-tokenfilter.asciidoc[]
 
-include::tokenfilters/cjk-width-tokenfilter.asciidoc[]
+include::tokenfilters/shingle-tokenfilter.asciidoc[]
 
-include::tokenfilters/cjk-bigram-tokenfilter.asciidoc[]
+include::tokenfilters/snowball-tokenfilter.asciidoc[]
 
-include::tokenfilters/delimited-payload-tokenfilter.asciidoc[]
+include::tokenfilters/stemmer-tokenfilter.asciidoc[]
 
-include::tokenfilters/keep-words-tokenfilter.asciidoc[]
+include::tokenfilters/stemmer-override-tokenfilter.asciidoc[]
 
-include::tokenfilters/keep-types-tokenfilter.asciidoc[]
+include::tokenfilters/stop-tokenfilter.asciidoc[]
 
-include::tokenfilters/classic-tokenfilter.asciidoc[]
+include::tokenfilters/synonym-tokenfilter.asciidoc[]
 
-include::tokenfilters/apostrophe-tokenfilter.asciidoc[]
+include::tokenfilters/synonym-graph-tokenfilter.asciidoc[]
 
-include::tokenfilters/decimal-digit-tokenfilter.asciidoc[]
+include::tokenfilters/trim-tokenfilter.asciidoc[]
 
-include::tokenfilters/fingerprint-tokenfilter.asciidoc[]
+include::tokenfilters/truncate-tokenfilter.asciidoc[]
 
-include::tokenfilters/minhash-tokenfilter.asciidoc[]
+include::tokenfilters/unique-tokenfilter.asciidoc[]
 
-include::tokenfilters/remove-duplicates-tokenfilter.asciidoc[]
+include::tokenfilters/uppercase-tokenfilter.asciidoc[]
+
+include::tokenfilters/word-delimiter-tokenfilter.asciidoc[]
+
+include::tokenfilters/word-delimiter-graph-tokenfilter.asciidoc[]
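The hunk's context notes that token filters change tokens (e.g. lowercasing), delete them (e.g. stopwords), or add them (e.g. synonyms). A quick way to see a filter chain in action is the `_analyze` API; a minimal sketch, with illustrative sample text:

[source,console]
----
GET /_analyze
{
  "tokenizer": "standard",
  "filter": [ "lowercase", "asciifolding" ],
  "text": "Déjà Vu"
}
----

Here `lowercase` rewrites each token in place, and `asciifolding` strips the diacritics, yielding the tokens `deja` and `vu`.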

docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc — 1 addition & 0 deletions

@@ -74,6 +74,7 @@ The response will be:
 
 Note how only the `<NUM>` token is in the output.
 
+[discrete]
 === Exclude mode settings example
 
 If the `mode` parameter is set to `exclude` like in the following example:
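For context on the section this hunk touches: with `mode` set to `exclude`, the `keep_types` filter drops the listed token types instead of keeping them. A minimal `_analyze` sketch, with illustrative sample text:

[source,console]
----
GET /_analyze
{
  "tokenizer": "standard",
  "filter": [
    {
      "type": "keep_types",
      "types": [ "<NUM>" ],
      "mode": "exclude"
    }
  ],
  "text": "1 quick fox 2 lazy dogs"
}
----

The numeric tokens `1` and `2` are removed, and only the word tokens remain in the output.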

docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc — 1 addition & 1 deletion

@@ -167,7 +167,7 @@ PUT /test_index
 Using `synonyms_path` to define WordNet synonyms in a file is supported
 as well.
 
-
+[discrete]
 === Parsing synonym files
 
 Elasticsearch will use the token filters preceding the synonym filter
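That context line carries the key behavioral note: filters placed before the synonym filter in the chain are also applied when parsing synonym entries. A minimal sketch, assuming illustrative names `my-index`, `my_synonyms`, and `my_analyzer` (none of which appear in this commit):

[source,console]
----
PUT /my-index
{
  "settings": {
    "analysis": {
      "filter": {
        "my_synonyms": {
          "type": "synonym",
          "synonyms": [ "universe, cosmos" ]
        }
      },
      "analyzer": {
        "my_analyzer": {
          "tokenizer": "standard",
          "filter": [ "lowercase", "my_synonyms" ]
        }
      }
    }
  }
}
----

Because `lowercase` precedes `my_synonyms`, entries such as `Universe` in a synonym list would be lowercased before matching.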

docs/reference/analysis/tokenizers.asciidoc — 10 additions & 17 deletions

@@ -121,41 +121,34 @@ path, splits on the path separator, and emits a term for each component in the
 tree, e.g. `/foo/bar/baz` -> `[/foo, /foo/bar, /foo/bar/baz ]`.
 
 
+include::tokenizers/chargroup-tokenizer.asciidoc[]
 
+include::tokenizers/classic-tokenizer.asciidoc[]
 
+include::tokenizers/edgengram-tokenizer.asciidoc[]
 
-include::tokenizers/standard-tokenizer.asciidoc[]
+include::tokenizers/keyword-tokenizer.asciidoc[]
 
 include::tokenizers/letter-tokenizer.asciidoc[]
 
 include::tokenizers/lowercase-tokenizer.asciidoc[]
 
-include::tokenizers/whitespace-tokenizer.asciidoc[]
-
-include::tokenizers/uaxurlemail-tokenizer.asciidoc[]
-
-include::tokenizers/classic-tokenizer.asciidoc[]
-
-include::tokenizers/thai-tokenizer.asciidoc[]
-
-
 include::tokenizers/ngram-tokenizer.asciidoc[]
 
-include::tokenizers/edgengram-tokenizer.asciidoc[]
-
+include::tokenizers/pathhierarchy-tokenizer.asciidoc[]
 
-include::tokenizers/keyword-tokenizer.asciidoc[]
+include::tokenizers/pathhierarchy-tokenizer-examples.asciidoc[]
 
 include::tokenizers/pattern-tokenizer.asciidoc[]
 
-include::tokenizers/chargroup-tokenizer.asciidoc[]
-
 include::tokenizers/simplepattern-tokenizer.asciidoc[]
 
 include::tokenizers/simplepatternsplit-tokenizer.asciidoc[]
 
-include::tokenizers/pathhierarchy-tokenizer.asciidoc[]
+include::tokenizers/standard-tokenizer.asciidoc[]
 
-include::tokenizers/pathhierarchy-tokenizer-examples.asciidoc[]
+include::tokenizers/thai-tokenizer.asciidoc[]
 
+include::tokenizers/uaxurlemail-tokenizer.asciidoc[]
 
+include::tokenizers/whitespace-tokenizer.asciidoc[]
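The hunk's context describes the `path_hierarchy` tokenizer: `/foo/bar/baz` -> `[/foo, /foo/bar, /foo/bar/baz]`. A minimal `_analyze` sketch reproducing that example:

[source,console]
----
POST /_analyze
{
  "tokenizer": "path_hierarchy",
  "text": "/foo/bar/baz"
}
----

Each emitted term is a prefix of the full path, which is what makes the tokenizer useful for matching a file against any of its ancestor directories.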
