
Commit 580e8a6

Increase max number of dimensions from 16 to 21 (#95340)
Here we increase the maximum number of dimension fields for a time series index from 16 to 21. The maximum allowed size for a field in Lucene is 32 KB, and when encoding the _tsid we include all dimension field names and all dimension field values. With a maximum field name of 512 bytes and a maximum field value of 1024 bytes, 32 KB / (512 + 1024) bytes ≈ 21.3, so the maximum number of dimensions is 21.
1 parent cb04885 commit 580e8a6
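
For reference, a minimal sketch of the arithmetic behind the new default (not part of this commit); the constants mirror the per-field limits cited above and the 32766-byte _tsid limit asserted in the test below:

public class DimensionLimitMath {
    public static void main(String[] args) {
        // Lucene rejects indexed terms longer than 32766 bytes, so the encoded _tsid must stay under that.
        int maxTsidBytes = 32766;
        // Worst case per dimension: a 512-byte field name plus a 1024-byte field value.
        int maxBytesPerDimension = 512 + 1024;
        // 32766 / 1536 ≈ 21.3, so at most 21 worst-case dimensions fit into one _tsid.
        System.out.println(maxTsidBytes / maxBytesPerDimension); // prints 21
    }
}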

3 files changed: +202 −2 lines changed

org/elasticsearch/timeseries/support/TimeSeriesDimensionsLimitIT.java (new file)

Lines changed: 200 additions & 0 deletions
@@ -0,0 +1,200 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.timeseries.support;

import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.CheckedConsumer;
import org.elasticsearch.index.IndexMode;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.DocumentParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.json.JsonXContent;

import java.io.IOException;
import java.time.Instant;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;

import static org.hamcrest.Matchers.equalTo;

public class TimeSeriesDimensionsLimitIT extends ESIntegTestCase {

    public void testDimensionFieldNameLimit() throws IOException {
        int dimensionFieldLimit = 21;
        final String dimensionFieldName = randomAlphaOfLength(randomIntBetween(513, 1024));
        createTimeSeriesIndex(mapping -> {
            mapping.startObject("routing_field").field("type", "keyword").field("time_series_dimension", true).endObject();
            mapping.startObject(dimensionFieldName).field("type", "keyword").field("time_series_dimension", true).endObject();
        },
            mapping -> mapping.startObject("gauge").field("type", "integer").field("time_series_metric", "gauge").endObject(),
            () -> List.of("routing_field"),
            dimensionFieldLimit
        );
        final Exception ex = expectThrows(
            DocumentParsingException.class,
            () -> client().prepareIndex("test")
                .setSource(
                    "routing_field",
                    randomAlphaOfLength(10),
                    dimensionFieldName,
                    randomAlphaOfLength(1024),
                    "gauge",
                    randomIntBetween(10, 20),
                    "@timestamp",
                    Instant.now().toEpochMilli()
                )
                .get()
        );
        assertThat(
            ex.getCause().getMessage(),
            equalTo(
                "Dimension name must be less than [512] bytes but [" + dimensionFieldName + "] was [" + dimensionFieldName.length() + "]."
            )
        );
    }

    public void testDimensionFieldValueLimit() throws IOException {
        int dimensionFieldLimit = 21;
        createTimeSeriesIndex(
            mapping -> mapping.startObject("field").field("type", "keyword").field("time_series_dimension", true).endObject(),
            mapping -> mapping.startObject("gauge").field("type", "integer").field("time_series_metric", "gauge").endObject(),
            () -> List.of("field"),
            dimensionFieldLimit
        );
        long startTime = Instant.now().toEpochMilli();
        client().prepareIndex("test")
            .setSource("field", randomAlphaOfLength(1024), "gauge", randomIntBetween(10, 20), "@timestamp", startTime)
            .get();
        final Exception ex = expectThrows(
            DocumentParsingException.class,
            () -> client().prepareIndex("test")
                .setSource("field", randomAlphaOfLength(1025), "gauge", randomIntBetween(10, 20), "@timestamp", startTime + 1)
                .get()
        );
        assertThat(ex.getCause().getMessage(), equalTo("Dimension fields must be less than [1024] bytes but was [1025]."));
    }

    public void testTotalNumberOfDimensionFieldsLimit() {
        int dimensionFieldLimit = 21;
        final Exception ex = expectThrows(IllegalArgumentException.class, () -> createTimeSeriesIndex(mapping -> {
            mapping.startObject("routing_field").field("type", "keyword").field("time_series_dimension", true).endObject();
            for (int i = 0; i < dimensionFieldLimit; i++) {
                mapping.startObject(randomAlphaOfLength(10)).field("type", "keyword").field("time_series_dimension", true).endObject();
            }
        },
            mapping -> mapping.startObject("gauge").field("type", "integer").field("time_series_metric", "gauge").endObject(),
            () -> List.of("routing_field"),
            dimensionFieldLimit
        ));

        assertThat(ex.getMessage(), equalTo("Limit of total dimension fields [" + dimensionFieldLimit + "] has been exceeded"));
    }

    public void testTotalNumberOfDimensionFieldsDefaultLimit() {
        int dimensionFieldLimit = 21;
        final Exception ex = expectThrows(IllegalArgumentException.class, () -> createTimeSeriesIndex(mapping -> {
            mapping.startObject("routing_field").field("type", "keyword").field("time_series_dimension", true).endObject();
            for (int i = 0; i < dimensionFieldLimit; i++) {
                mapping.startObject(randomAlphaOfLength(10)).field("type", "keyword").field("time_series_dimension", true).endObject();
            }
        },
            mapping -> mapping.startObject("gauge").field("type", "integer").field("time_series_metric", "gauge").endObject(),
            () -> List.of("routing_field"),
            null // NOTE: using default field limit
        ));

        assertThat(ex.getMessage(), equalTo("Limit of total dimension fields [" + dimensionFieldLimit + "] has been exceeded"));
    }

    public void testTotalDimensionFieldsSizeLuceneLimit() throws IOException {
        int dimensionFieldLimit = 21;
        final List<String> dimensionFieldNames = new ArrayList<>();
        createTimeSeriesIndex(mapping -> {
            for (int i = 0; i < dimensionFieldLimit; i++) {
                String dimensionFieldName = randomAlphaOfLength(512);
                dimensionFieldNames.add(dimensionFieldName);
                mapping.startObject(dimensionFieldName).field("type", "keyword").field("time_series_dimension", true).endObject();
            }
        },
            mapping -> mapping.startObject("gauge").field("type", "integer").field("time_series_metric", "gauge").endObject(),
            () -> List.of(dimensionFieldNames.get(0)),
            dimensionFieldLimit
        );

        final Map<String, Object> source = new HashMap<>();
        source.put("gauge", randomIntBetween(10, 20));
        source.put("@timestamp", Instant.now().toEpochMilli());
        for (int i = 0; i < dimensionFieldLimit; i++) {
            source.put(dimensionFieldNames.get(i), randomAlphaOfLength(1024));
        }
        final IndexResponse indexResponse = client().prepareIndex("test").setSource(source).get();
        assertEquals(RestStatus.CREATED.getStatus(), indexResponse.status().getStatus());
    }

    public void testTotalDimensionFieldsSizeLuceneLimitPlusOne() throws IOException {
        int dimensionFieldLimit = 22;
        final List<String> dimensionFieldNames = new ArrayList<>();
        createTimeSeriesIndex(mapping -> {
            for (int i = 0; i < dimensionFieldLimit; i++) {
                String dimensionFieldName = randomAlphaOfLength(512);
                dimensionFieldNames.add(dimensionFieldName);
                mapping.startObject(dimensionFieldName).field("type", "keyword").field("time_series_dimension", true).endObject();
            }
        },
            mapping -> mapping.startObject("gauge").field("type", "integer").field("time_series_metric", "gauge").endObject(),
            () -> List.of(dimensionFieldNames.get(0)),
            dimensionFieldLimit
        );

        final Map<String, Object> source = new HashMap<>();
        source.put("routing_field", randomAlphaOfLength(1024));
        source.put("gauge", randomIntBetween(10, 20));
        source.put("@timestamp", Instant.now().toEpochMilli());
        for (int i = 0; i < dimensionFieldLimit; i++) {
            source.put(dimensionFieldNames.get(i), randomAlphaOfLength(1024));
        }
        final Exception ex = expectThrows(DocumentParsingException.class, () -> client().prepareIndex("test").setSource(source).get());
        assertEquals("_tsid longer than [32766] bytes [33903].", ex.getCause().getMessage());
    }

    private void createTimeSeriesIndex(
        final CheckedConsumer<XContentBuilder, IOException> dimensions,
        final CheckedConsumer<XContentBuilder, IOException> metrics,
        final Supplier<List<String>> routingPaths,
        final Integer dimensionsFieldLimit
    ) throws IOException {
        XContentBuilder mapping = JsonXContent.contentBuilder();
        mapping.startObject().startObject("properties");
        mapping.startObject("@timestamp").field("type", "date").endObject();
        metrics.accept(mapping);
        dimensions.accept(mapping);
        mapping.endObject().endObject();

        Settings.Builder settings = Settings.builder()
            .put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES)
            .putList(IndexMetadata.INDEX_ROUTING_PATH.getKey(), routingPaths.get())
            .put(IndexSettings.TIME_SERIES_START_TIME.getKey(), "2000-01-08T23:40:53.384Z")
            .put(IndexSettings.TIME_SERIES_END_TIME.getKey(), "2106-01-08T23:40:53.384Z");

        if (dimensionsFieldLimit != null) {
            settings.put(MapperService.INDEX_MAPPING_DIMENSION_FIELDS_LIMIT_SETTING.getKey(), dimensionsFieldLimit);
        }

        client().admin().indices().prepareCreate("test").setSettings(settings.build()).setMapping(mapping).get();
    }

}

server/src/main/java/org/elasticsearch/index/mapper/MapperService.java

Lines changed: 1 addition & 1 deletion
@@ -110,7 +110,7 @@ public enum MergeReason {
     );
     public static final Setting<Long> INDEX_MAPPING_DIMENSION_FIELDS_LIMIT_SETTING = Setting.longSetting(
         "index.mapping.dimension_fields.limit",
-        16,
+        21,
         0,
         Property.Dynamic,
         Property.IndexScope

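The setting changed above is declared as dynamic and index-scoped, so individual indices can still override the new default of 21. For context, a minimal sketch (not part of this commit, mirroring the test's createTimeSeriesIndex helper; "mapping" stands for an XContentBuilder built as in the test above):

Settings.Builder settings = Settings.builder()
    .put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES)
    .putList(IndexMetadata.INDEX_ROUTING_PATH.getKey(), List.of("routing_field"))
    .put(IndexSettings.TIME_SERIES_START_TIME.getKey(), "2000-01-08T23:40:53.384Z")
    .put(IndexSettings.TIME_SERIES_END_TIME.getKey(), "2106-01-08T23:40:53.384Z")
    // Override the default of 21 dimension fields with a stricter per-index limit.
    .put(MapperService.INDEX_MAPPING_DIMENSION_FIELDS_LIMIT_SETTING.getKey(), 4);
client().admin().indices().prepareCreate("test").setSettings(settings.build()).setMapping(mapping).get();
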
server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java

Lines changed: 1 addition & 1 deletion
@@ -325,7 +325,7 @@ public void testTooManyDimensionFields() {
         int max;
         Settings settings;
         if (randomBoolean()) {
-            max = 16; // By default no more than 16 dimensions per document are supported
+            max = 21; // By default no more than 21 dimensions per document are supported
             settings = getIndexSettings();
         } else {
             max = between(1, 10000);
