Commit f1c2a36

Merge branch 'esql_docs_real_applies_to' of https://github.com/craigtaverner/elasticsearch into esql_docs_real_applies_to
2 parents: c136dd9 + 31b4443

File tree: 770 files changed (+32740 / -9553 lines)


.buildkite/hooks/pre-command

Lines changed: 3 additions & 5 deletions
@@ -95,13 +95,11 @@ if [[ "${USE_PROD_DOCKER_CREDENTIALS:-}" == "true" ]]; then
 fi
 
 if [[ "${USE_PERF_CREDENTIALS:-}" == "true" ]]; then
-  PERF_METRICS_HOST=$(vault read -field=es_host /secret/ci/elastic-elasticsearch/esbench-metics)
-  PERF_METRICS_INDEX="dummy-micro-benchmarks"
-  PERF_METRICS_USERNAME=$(vault read -field=es_username /secret/ci/elastic-elasticsearch/esbench-metics)
-  PERF_METRICS_PASSWORD=$(vault read -field=es_password /secret/ci/elastic-elasticsearch/esbench-metics)
+  PERF_METRICS_HOST=$(vault read -field=es_host /secret/ci/elastic-elasticsearch/microbenchmarks-metrics)
+  PERF_METRICS_USERNAME=$(vault read -field=es_user /secret/ci/elastic-elasticsearch/microbenchmarks-metrics)
+  PERF_METRICS_PASSWORD=$(vault read -field=es_password /secret/ci/elastic-elasticsearch/microbenchmarks-metrics)
 
   export PERF_METRICS_HOST
-  export PERF_METRICS_INDEX
   export PERF_METRICS_USERNAME
   export PERF_METRICS_PASSWORD
 fi

.buildkite/scripts/index-micro-benchmark-results.sh

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@
 jq -c '.[]' "benchmarks/build/result.json" | while read -r doc; do
   doc=$(echo "$doc" | jq --argjson timestamp "$(date +%s000)" '. + {"@timestamp": $timestamp}')
   echo "Indexing $(echo "$doc" | jq -r '.benchmark')"
-  curl -s -X POST "https://$PERF_METRICS_HOST/$PERF_METRICS_INDEX/_doc" \
+  curl -s -X POST "https://$PERF_METRICS_HOST/metrics-microbenchmarks-default/_doc" \
     -u "$PERF_METRICS_USERNAME:$PERF_METRICS_PASSWORD" \
     -H 'Content-Type: application/json' \
     -d "$doc"
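
With this change each JMH result document is appended to the metrics-microbenchmarks-default data stream instead of a configurable index. Below is a minimal Java sketch, not part of this commit, of the same write using the low-level Elasticsearch RestClient and the PERF_METRICS_* variables exported by the pre-command hook; the class name, port 443, and the sample document are illustrative assumptions.

// Hypothetical illustration: index one benchmark result document into
// metrics-microbenchmarks-default, mirroring the curl call in the script above.
import org.apache.http.HttpHost;
import org.apache.http.message.BasicHeader;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class MicroBenchmarkResultIndexer {
    public static void main(String[] args) throws Exception {
        // These variables are exported by .buildkite/hooks/pre-command after the vault reads.
        String host = System.getenv("PERF_METRICS_HOST");
        String user = System.getenv("PERF_METRICS_USERNAME");
        String password = System.getenv("PERF_METRICS_PASSWORD");
        String basicAuth = Base64.getEncoder()
            .encodeToString((user + ":" + password).getBytes(StandardCharsets.UTF_8));

        RestClient client = RestClient.builder(new HttpHost(host, 443, "https"))
            .setDefaultHeaders(new BasicHeader[] { new BasicHeader("Authorization", "Basic " + basicAuth) })
            .build();
        try (client) {
            // One result document; the real script streams these out of benchmarks/build/result.json with jq.
            Request request = new Request("POST", "/metrics-microbenchmarks-default/_doc");
            request.setJsonEntity("{\"@timestamp\": 1700000000000, \"benchmark\": \"AggregatorBenchmark.run\"}");
            Response response = client.performRequest(request);
            System.out.println(response.getStatusLine());
        }
    }
}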

benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java

Lines changed: 31 additions & 14 deletions
@@ -73,6 +73,7 @@ public class AggregatorBenchmark {
     static final int BLOCK_LENGTH = 8 * 1024;
     private static final int OP_COUNT = 1024;
     private static final int GROUPS = 5;
+    private static final int TOP_N_LIMIT = 3;
 
     private static final BlockFactory blockFactory = BlockFactory.getInstance(
         new NoopCircuitBreaker("noop"),
@@ -90,6 +91,7 @@ public class AggregatorBenchmark {
     private static final String TWO_ORDINALS = "two_" + ORDINALS;
     private static final String LONGS_AND_BYTES_REFS = LONGS + "_and_" + BYTES_REFS;
     private static final String TWO_LONGS_AND_BYTES_REFS = "two_" + LONGS + "_and_" + BYTES_REFS;
+    private static final String TOP_N_LONGS = "top_n_" + LONGS;
 
     private static final String VECTOR_DOUBLES = "vector_doubles";
     private static final String HALF_NULL_DOUBLES = "half_null_doubles";
@@ -147,7 +149,8 @@ static void selfTest() {
             TWO_BYTES_REFS,
             TWO_ORDINALS,
             LONGS_AND_BYTES_REFS,
-            TWO_LONGS_AND_BYTES_REFS }
+            TWO_LONGS_AND_BYTES_REFS,
+            TOP_N_LONGS }
     )
     public String grouping;
 
@@ -161,8 +164,7 @@ static void selfTest() {
     public String filter;
 
     private static Operator operator(DriverContext driverContext, String grouping, String op, String dataType, String filter) {
-
-        if (grouping.equals("none")) {
+        if (grouping.equals(NONE)) {
             return new AggregationOperator(
                 List.of(supplier(op, dataType, filter).aggregatorFactory(AggregatorMode.SINGLE, List.of(0)).apply(driverContext)),
                 driverContext
@@ -188,6 +190,9 @@ private static Operator operator(DriverContext driverContext, String grouping, S
                 new BlockHash.GroupSpec(1, ElementType.LONG),
                 new BlockHash.GroupSpec(2, ElementType.BYTES_REF)
             );
+            case TOP_N_LONGS -> List.of(
+                new BlockHash.GroupSpec(0, ElementType.LONG, false, new BlockHash.TopNDef(0, true, true, TOP_N_LIMIT))
+            );
             default -> throw new IllegalArgumentException("unsupported grouping [" + grouping + "]");
         };
         return new HashAggregationOperator(
@@ -271,10 +276,14 @@ private static void checkGrouped(String prefix, String grouping, String op, Stri
             case BOOLEANS -> 2;
             default -> GROUPS;
         };
+        int availableGroups = switch (grouping) {
+            case TOP_N_LONGS -> TOP_N_LIMIT;
+            default -> groups;
+        };
         switch (op) {
             case AVG -> {
                 DoubleBlock dValues = (DoubleBlock) values;
-                for (int g = 0; g < groups; g++) {
+                for (int g = 0; g < availableGroups; g++) {
                     long group = g;
                     long sum = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).sum();
                     long count = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).count();
@@ -286,7 +295,7 @@ private static void checkGrouped(String prefix, String grouping, String op, Stri
             }
             case COUNT -> {
                 LongBlock lValues = (LongBlock) values;
-                for (int g = 0; g < groups; g++) {
+                for (int g = 0; g < availableGroups; g++) {
                     long group = g;
                     long expected = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).count() * opCount;
                     if (lValues.getLong(g) != expected) {
@@ -296,7 +305,7 @@ private static void checkGrouped(String prefix, String grouping, String op, Stri
             }
             case COUNT_DISTINCT -> {
                 LongBlock lValues = (LongBlock) values;
-                for (int g = 0; g < groups; g++) {
+                for (int g = 0; g < availableGroups; g++) {
                     long group = g;
                     long expected = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).distinct().count();
                     long count = lValues.getLong(g);
@@ -310,15 +319,15 @@ private static void checkGrouped(String prefix, String grouping, String op, Stri
                 switch (dataType) {
                     case LONGS -> {
                         LongBlock lValues = (LongBlock) values;
-                        for (int g = 0; g < groups; g++) {
+                        for (int g = 0; g < availableGroups; g++) {
                             if (lValues.getLong(g) != (long) g) {
                                 throw new AssertionError(prefix + "expected [" + g + "] but was [" + lValues.getLong(g) + "]");
                             }
                         }
                     }
                     case DOUBLES -> {
                         DoubleBlock dValues = (DoubleBlock) values;
-                        for (int g = 0; g < groups; g++) {
+                        for (int g = 0; g < availableGroups; g++) {
                             if (dValues.getDouble(g) != (long) g) {
                                 throw new AssertionError(prefix + "expected [" + g + "] but was [" + dValues.getDouble(g) + "]");
                             }
@@ -331,7 +340,7 @@ private static void checkGrouped(String prefix, String grouping, String op, Stri
                 switch (dataType) {
                     case LONGS -> {
                         LongBlock lValues = (LongBlock) values;
-                        for (int g = 0; g < groups; g++) {
+                        for (int g = 0; g < availableGroups; g++) {
                             long group = g;
                             long expected = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).max().getAsLong();
                             if (lValues.getLong(g) != expected) {
@@ -341,7 +350,7 @@ private static void checkGrouped(String prefix, String grouping, String op, Stri
                     }
                     case DOUBLES -> {
                         DoubleBlock dValues = (DoubleBlock) values;
-                        for (int g = 0; g < groups; g++) {
+                        for (int g = 0; g < availableGroups; g++) {
                             long group = g;
                             long expected = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).max().getAsLong();
                             if (dValues.getDouble(g) != expected) {
@@ -356,7 +365,7 @@ private static void checkGrouped(String prefix, String grouping, String op, Stri
                 switch (dataType) {
                     case LONGS -> {
                         LongBlock lValues = (LongBlock) values;
-                        for (int g = 0; g < groups; g++) {
+                        for (int g = 0; g < availableGroups; g++) {
                             long group = g;
                             long expected = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).sum() * opCount;
                             if (lValues.getLong(g) != expected) {
@@ -366,7 +375,7 @@ private static void checkGrouped(String prefix, String grouping, String op, Stri
                     }
                     case DOUBLES -> {
                         DoubleBlock dValues = (DoubleBlock) values;
-                        for (int g = 0; g < groups; g++) {
+                        for (int g = 0; g < availableGroups; g++) {
                             long group = g;
                             long expected = LongStream.range(0, BLOCK_LENGTH).filter(l -> l % groups == group).sum() * opCount;
                             if (dValues.getDouble(g) != expected) {
@@ -391,6 +400,14 @@ private static void checkGroupingBlock(String prefix, String grouping, Block blo
                     }
                 }
             }
+            case TOP_N_LONGS -> {
+                LongBlock groups = (LongBlock) block;
+                for (int g = 0; g < TOP_N_LIMIT; g++) {
+                    if (groups.getLong(g) != (long) g) {
+                        throw new AssertionError(prefix + "bad group expected [" + g + "] but was [" + groups.getLong(g) + "]");
+                    }
+                }
+            }
             case INTS -> {
                 IntBlock groups = (IntBlock) block;
                 for (int g = 0; g < GROUPS; g++) {
@@ -495,7 +512,7 @@ private static void checkUngrouped(String prefix, String op, String dataType, Pa
 
     private static Page page(BlockFactory blockFactory, String grouping, String blockType) {
         Block dataBlock = dataBlock(blockFactory, blockType);
-        if (grouping.equals("none")) {
+        if (grouping.equals(NONE)) {
             return new Page(dataBlock);
         }
         List<Block> blocks = groupingBlocks(grouping, blockType);
@@ -564,7 +581,7 @@ private static Block groupingBlock(String grouping, String blockType) {
             default -> throw new UnsupportedOperationException("bad grouping [" + grouping + "]");
         };
         return switch (grouping) {
-            case LONGS -> {
+            case TOP_N_LONGS, LONGS -> {
                 var builder = blockFactory.newLongBlockBuilder(BLOCK_LENGTH);
                 for (int i = 0; i < BLOCK_LENGTH; i++) {
                     for (int v = 0; v < valuesPerGroup; v++) {
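
The new TOP_N_LONGS case exercises the top-N constrained grouping added to the compute engine's BlockHash. Below is a minimal sketch, not from the commit, of the grouping spec it builds; the import locations are assumptions based on the compute module layout, and the meaning of the TopNDef arguments (order index, ascending, nulls-first, limit) is inferred from the call in the diff.

// Sketch only: build the grouping spec the benchmark uses for TOP_N_LONGS,
// i.e. group on a LONG channel but keep just the top TOP_N_LIMIT groups.
// Package names are assumed; TopNDef argument meanings are inferred.
import org.elasticsearch.compute.aggregation.blockhash.BlockHash;
import org.elasticsearch.compute.data.ElementType;

import java.util.List;

class TopNLongsGroupingSketch {
    private static final int TOP_N_LIMIT = 3;

    static List<BlockHash.GroupSpec> topNLongsGroups() {
        // Channel 0 holds a LONG block; the TopNDef limits the hash to TOP_N_LIMIT groups.
        return List.of(
            new BlockHash.GroupSpec(0, ElementType.LONG, false, new BlockHash.TopNDef(0, true, true, TOP_N_LIMIT))
        );
    }
}
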
benchmarks/src/main/java/org/elasticsearch/benchmark/xcontent/OptimizedTextBenchmark.java

Lines changed: 108 additions & 0 deletions
@@ -0,0 +1,108 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

package org.elasticsearch.benchmark.xcontent;

import org.elasticsearch.benchmark.index.mapper.MapperServiceFactory;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.logging.LogConfigurator;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentType;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Threads;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;

import java.io.IOException;
import java.util.Random;
import java.util.concurrent.TimeUnit;

/**
 * Benchmark to measure indexing performance of keyword fields. Used to measure performance impact of skipping
 * UTF-8 to UTF-16 conversion during document parsing.
 */
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@State(Scope.Benchmark)
@Fork(1)
@Threads(1)
@Warmup(iterations = 1)
@Measurement(iterations = 5)
public class OptimizedTextBenchmark {
    static {
        // For Elasticsearch900Lucene101Codec:
        LogConfigurator.loadLog4jPlugins();
        LogConfigurator.configureESLogging();
        LogConfigurator.setNodeName("test");
    }

    /**
     * Total number of documents to index.
     */
    @Param("1048576")
    private int nDocs;

    private MapperService mapperService;
    private SourceToParse[] sources;

    private String randomValue(int length) {
        final String CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
        Random random = new Random();
        StringBuilder builder = new StringBuilder(length);
        for (int i = 0; i < length; i++) {
            builder.append(CHARS.charAt(random.nextInt(CHARS.length())));
        }
        return builder.toString();
    }

    @Setup(Level.Trial)
    public void setup() throws IOException {
        mapperService = MapperServiceFactory.create("""
            {
                "_doc": {
                    "dynamic": false,
                    "properties": {
                        "field": {
                            "type": "keyword"
                        }
                    }
                }
            }
            """);

        sources = new SourceToParse[nDocs];
        for (int i = 0; i < nDocs; i++) {
            XContentBuilder b = XContentFactory.jsonBuilder();
            b.startObject().field("field", randomValue(8)).endObject();
            sources[i] = new SourceToParse(UUIDs.randomBase64UUID(), BytesReference.bytes(b), XContentType.JSON);
        }
    }

    @Benchmark
    public void indexDocuments(final Blackhole bh) {
        final var mapper = mapperService.documentMapper();
        for (int i = 0; i < nDocs; i++) {
            bh.consume(mapper.parse(sources[i]));
        }
    }
}
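
The new benchmark is a standard JMH class, so it can be launched like the other micro-benchmarks in this module. A hedged example of invoking it programmatically through the JMH Runner API follows; the runner class and the smaller nDocs override are illustrative assumptions, and the repository's own Gradle wiring for the benchmarks module is likely the usual entry point.

// Illustrative runner, not part of this commit: run OptimizedTextBenchmark via the
// JMH Runner API, overriding nDocs so a local smoke run finishes quickly.
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;

public class OptimizedTextBenchmarkRunner {
    public static void main(String[] args) throws RunnerException {
        Options options = new OptionsBuilder()
            .include(OptimizedTextBenchmark.class.getSimpleName())  // match the benchmark class by name
            .param("nDocs", "65536")                                // override the 1048576-document default
            .build();
        new Runner(options).run();
    }
}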
