Commit 9ccdd22

Merge branch 'main' into jdk/api-extractor-fix-inherited-access
2 parents 726f286 + aef5c27 commit 9ccdd22

35 files changed: +1796 -347 lines

docs/changelog/131485.yaml

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+pr: 131485
+summary: Run single phase aggregation when possible
+area: ES|QL
+type: enhancement
+issues: []

docs/changelog/135776.yaml

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+pr: 135776
+summary: Fix KQL case-sensitivity for keyword fields in ES|QL
+area: Search
+type: bug
+issues:
+ - 135772

muted-tests.yml

Lines changed: 6 additions & 0 deletions
@@ -609,6 +609,12 @@ tests:
 - class: org.elasticsearch.xpack.esql.ccq.AllSupportedFieldsIT
   method: testFetchDenseVector {pref=null mode=time_series}
   issue: https://github.com/elastic/elasticsearch/issues/135762
+- class: org.elasticsearch.xpack.esql.expression.function.aggregate.IrateTests
+  method: testGroupingAggregate {TestCase=<positive longs>}
+  issue: https://github.com/elastic/elasticsearch/issues/135775
+- class: org.elasticsearch.xpack.esql.qa.single_node.GenerativeIT
+  method: test
+  issue: https://github.com/elastic/elasticsearch/issues/135787

 # Examples:
 #

rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_contextualai.json

Lines changed: 41 additions & 0 deletions
@@ -0,0 +1,41 @@
+{
+  "inference.put_contextualai": {
+    "documentation": {
+      "url": "https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-contextualai",
+      "description": "Create a Contextual AI inference endpoint"
+    },
+    "stability": "stable",
+    "visibility": "public",
+    "headers": {
+      "accept": [
+        "application/json"
+      ],
+      "content_type": [
+        "application/json"
+      ]
+    },
+    "url": {
+      "paths": [
+        {
+          "path": "/_inference/{task_type}/{contextualai_inference_id}",
+          "methods": [
+            "PUT"
+          ],
+          "parts": {
+            "task_type": {
+              "type": "string",
+              "description": "The task type"
+            },
+            "contextualai_inference_id": {
+              "type": "string",
+              "description": "The inference Id"
+            }
+          }
+        }
+      ]
+    },
+    "body": {
+      "description": "The inference endpoint's task and service settings"
+    }
+  }
+}
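The spec above only registers the URL template and body shape; it does not show a request. A minimal sketch of calling the endpoint with the low-level Java REST client follows, assuming a local cluster on port 9200; the `service` and `service_settings` keys in the body are illustrative placeholders, not taken from this spec.

```java
import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class PutContextualAiExample {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // PUT /_inference/{task_type}/{contextualai_inference_id}, per the spec above.
            Request request = new Request("PUT", "/_inference/rerank/my-contextualai-endpoint");
            // Body keys below are hypothetical placeholders; consult the service docs for real settings.
            request.setJsonEntity("""
                {
                  "service": "contextualai",
                  "service_settings": {
                    "api_key": "<key>",
                    "model_id": "<model>"
                  }
                }
                """);
            Response response = client.performRequest(request);
            System.out.println(response.getStatusLine());
        }
    }
}
```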

server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java

Lines changed: 45 additions & 9 deletions
@@ -34,6 +34,7 @@
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.set.Sets;
+import org.elasticsearch.core.Assertions;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.core.Tuple;
@@ -818,7 +819,7 @@ void validateIndexTemplateV2(ProjectMetadata projectMetadata, String name, Compo
         var templateToValidate = indexTemplate.toBuilder().template(Template.builder(finalTemplate).settings(finalSettings)).build();

         validate(name, templateToValidate, additionalSettings);
-        validateDataStreamsStillReferenced(projectMetadata, name, templateToValidate);
+        maybeValidateDataStreamsStillReferenced(projectMetadata, name, templateToValidate);
         validateLifecycle(componentTemplates, name, templateToValidate, globalRetentionSettings.get(false));
         validateDataStreamOptions(componentTemplates, name, templateToValidate, globalRetentionSettings.get(true));
@@ -944,6 +945,43 @@ static void validateDataStreamOptions(
         }
     }

+    /**
+     * Maybe runs {@link #validateDataStreamsStillReferenced} if it looks like the new composite template could change data stream
+     * coverage.
+     */
+    private static void maybeValidateDataStreamsStillReferenced(
+        ProjectMetadata project,
+        String templateName,
+        ComposableIndexTemplate newTemplate
+    ) {
+        final ComposableIndexTemplate existingTemplate = project.templatesV2().get(templateName);
+        final Settings existingSettings = Optional.ofNullable(existingTemplate)
+            .map(ComposableIndexTemplate::template)
+            .map(Template::settings)
+            .orElse(Settings.EMPTY);
+        final Settings newSettings = Optional.ofNullable(newTemplate)
+            .map(ComposableIndexTemplate::template)
+            .map(Template::settings)
+            .orElse(Settings.EMPTY);
+        // We check whether anything relevant has changed that could affect data stream coverage and return early if not.
+        // These checks are based on the implementation of findV2Template and the data stream template check in this method.
+        // If we're adding a new template, we do the full check in case this template's priority changes coverage.
+        if (existingTemplate != null
+            && Objects.equals(existingTemplate.indexPatterns(), newTemplate.indexPatterns())
+            && Objects.equals(existingSettings.get(IndexMetadata.SETTING_INDEX_HIDDEN), newSettings.get(IndexMetadata.SETTING_INDEX_HIDDEN))
+            && Objects.equals(existingTemplate.getDataStreamTemplate() != null, newTemplate.getDataStreamTemplate() != null)
+            && Objects.equals(existingTemplate.priorityOrZero(), newTemplate.priorityOrZero())) {
+            if (Assertions.ENABLED) {
+                try {
+                    validateDataStreamsStillReferenced(project, templateName, newTemplate);
+                } catch (IllegalArgumentException e) {
+                    assert false : "Data stream reference validation took a shortcut but the full check failed: " + e.getMessage();
+                }
+            }
+            return;
+        }
+        validateDataStreamsStillReferenced(project, templateName, newTemplate);
+    }
+
     /**
      * Validate that by changing or adding {@code newTemplate}, there are
      * no unreferenced data streams. Note that this scenario is still possible
@@ -955,18 +993,16 @@ private static void validateDataStreamsStillReferenced(
         String templateName,
         ComposableIndexTemplate newTemplate
    ) {
-        final Set<String> dataStreams = project.dataStreams()
-            .entrySet()
-            .stream()
-            .filter(entry -> entry.getValue().isSystem() == false)
-            .map(Map.Entry::getKey)
-            .collect(Collectors.toSet());
-
         Function<Map<String, ComposableIndexTemplate>, Set<String>> findUnreferencedDataStreams = composableTemplates -> {
             final Set<String> unreferenced = new HashSet<>();
             // For each data stream that we have, see whether it's covered by a different
             // template (which is great), or whether it's now uncovered by any template
-            for (String dataStream : dataStreams) {
+            for (var dataStreamEntry : project.dataStreams().entrySet()) {
+                // Exclude system data streams
+                if (dataStreamEntry.getValue().isSystem()) {
+                    continue;
+                }
+                final String dataStream = dataStreamEntry.getKey();
                 final String matchingTemplate = findV2Template(project, composableTemplates.entrySet(), dataStream, false, false);
                 if (matchingTemplate == null) {
                     unreferenced.add(dataStream);
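The shortcut above follows a general pattern: compare the handful of fields the expensive check actually depends on, skip the check when they are unchanged, and re-run the full check in assertion-enabled builds so a drifting shortcut fails tests rather than silently skipping real validation. A minimal, self-contained sketch of that pattern follows; the `TemplateView` record and `validateFully` method are hypothetical stand-ins, not Elasticsearch APIs.

```java
import java.util.List;
import java.util.Objects;

public class ShortcutValidation {
    record TemplateView(List<String> indexPatterns, long priority) {}

    // Stand-in for an expensive coverage check, e.g. O(dataStreams * templates).
    static void validateFully(TemplateView t) {
        if (t.indexPatterns().isEmpty()) {
            throw new IllegalArgumentException("template must cover at least one pattern");
        }
    }

    static void maybeValidate(TemplateView existing, TemplateView updated) {
        // Cheap equality checks over the fields the expensive check depends on.
        boolean coverageUnchanged = existing != null
            && Objects.equals(existing.indexPatterns(), updated.indexPatterns())
            && existing.priority() == updated.priority();
        if (coverageUnchanged) {
            // In assert-enabled builds, verify the shortcut still agrees with the full check.
            assert fullCheckStillPasses(updated);
            return;
        }
        validateFully(updated);
    }

    private static boolean fullCheckStillPasses(TemplateView updated) {
        try {
            validateFully(updated);
        } catch (IllegalArgumentException e) {
            assert false : "shortcut taken but full validation failed: " + e.getMessage();
        }
        return true;
    }

    public static void main(String[] args) {
        TemplateView v1 = new TemplateView(List.of("logs-*"), 100);
        maybeValidate(v1, new TemplateView(List.of("logs-*"), 100));    // shortcut path
        maybeValidate(v1, new TemplateView(List.of("metrics-*"), 100)); // full check path
    }
}
```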
server/src/main/java/org/elasticsearch/common/util/IntNHash.java

Lines changed: 163 additions & 0 deletions
@@ -0,0 +1,163 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.common.util;
+
+import com.carrotsearch.hppc.BitMixer;
+
+import org.elasticsearch.core.Releasables;
+
+/**
+ * Specialized hash table implementation that maps N int values to ids.
+ * Collisions are resolved with open addressing and
+ * linear probing, growth is smooth thanks to {@link BigArrays} and capacity
+ * is always a multiple of N for faster identification of buckets.
+ * This class is not thread-safe.
+ */
+// IDs are internally stored as id + 1 so that 0 encodes for an empty slot
+public final class IntNHash extends AbstractHash {
+    private IntArray keyArray;
+    private final int keySize;
+    private final int[] scratch;
+
+    // Constructor with configurable capacity and default maximum load factor.
+    public IntNHash(long capacity, int keySize, BigArrays bigArrays) {
+        this(capacity, keySize, DEFAULT_MAX_LOAD_FACTOR, bigArrays);
+    }
+
+    // Constructor with configurable capacity and load factor.
+    public IntNHash(long capacity, int keySize, float maxLoadFactor, BigArrays bigArrays) {
+        super(capacity, maxLoadFactor, bigArrays);
+        this.keySize = keySize;
+        this.scratch = new int[keySize];
+        try {
+            // `super` allocates a big array so we have to `close` if we fail here or we'll leak it.
+            keyArray = bigArrays.newIntArray(keySize * capacity, false);
+        } finally {
+            if (keyArray == null) {
+                close();
+            }
+        }
+    }
+
+    public int[] getKeys(long id) {
+        getKeys(id, scratch);
+        return scratch;
+    }
+
+    public void getKeys(long id, int[] dst) {
+        assert dst.length == keySize;
+        for (int i = 0; i < keySize; i++) {
+            dst[i] = keyArray.get(keySize * id + i);
+        }
+    }
+
+    private boolean keyEquals(long id, int[] keys) {
+        long keyOffset = keySize * id;
+        // TODO: fast equals in BigArray
+        for (int i = 0; i < keys.length; i++) {
+            if (keyArray.get(keyOffset + i) != keys[i]) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    public long find(int[] keys) {
+        final long slot = slot(hash(keys), mask);
+        for (long index = slot;; index = nextSlot(index, mask)) {
+            final long id = id(index);
+            if (id == -1) {
+                return id;
+            } else if (keyEquals(id, keys)) {
+                return id;
+            }
+        }
+    }
+
+    private long set(long id, int[] keys) {
+        assert size < maxSize;
+        long slot = slot(hash(keys), mask);
+        for (long index = slot;; index = nextSlot(index, mask)) {
+            final long curId = id(index);
+            if (curId == -1) { // means unset
+                setId(index, id);
+                append(id, keys);
+                ++size;
+                return id;
+            } else {
+                if (keyEquals(curId, keys)) {
+                    return -1 - curId;
+                }
+            }
+        }
+    }
+
+    private void append(long id, int[] keys) {
+        final long keyOffset = keySize * id;
+        keyArray = bigArrays.grow(keyArray, keyOffset + keySize);
+        for (int i = 0; i < keys.length; i++) {
+            keyArray.set(keyOffset + i, keys[i]);
+        }
+    }
+
+    private void reset(long id) {
+        final long slot = slot(hashFromKeyArray(id), mask);
+        for (long index = slot;; index = nextSlot(index, mask)) {
+            final long curId = id(index);
+            if (curId == -1) { // means unset
+                setId(index, id);
+                break;
+            }
+        }
+    }
+
+    /**
+     * Try to add {@code keys}. Return its newly allocated id if it wasn't in
+     * the hash table yet, or {@code -1-id} if it was already present in
+     * the hash table.
+     */
+    public long add(int[] keys) {
+        if (size >= maxSize) {
+            assert size == maxSize;
+            grow();
+        }
+        assert size < maxSize;
+        return set(size, keys);
+    }
+
+    @Override
+    protected void removeAndAdd(long index) {
+        final long id = getAndSetId(index, -1);
+        assert id >= 0;
+        reset(id);
+    }
+
+    @Override
+    public void close() {
+        Releasables.close(keyArray, super::close);
+    }
+
+    static long hash(int[] keys) {
+        long hash = BitMixer.mix(keys[0]);
+        for (int i = 1; i < keys.length; i++) {
+            hash = 31L * hash + BitMixer.mix(keys[i]);
+        }
+        return hash;
+    }
+
+    long hashFromKeyArray(long id) {
+        final long keyOffset = id * keySize;
+        long hash = BitMixer.mix(keyArray.get(keyOffset));
+        for (int i = 1; i < keySize; i++) {
+            hash = 31L * hash + BitMixer.mix(keyArray.get(keyOffset + i));
+        }
+        return hash;
+    }
+}
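A minimal usage sketch for the new class: `add` returns a fresh dense id for an unseen key tuple and `-1 - id` for a duplicate, `find` returns `-1` when absent, and `getKeys(long)` fills a shared scratch buffer. `BigArrays.NON_RECYCLING_INSTANCE` is a real non-pooling allocator commonly used in tests; the key values below are illustrative, and the try-with-resources assumes `IntNHash` is releasable via its `AbstractHash` ancestors, as `close()` above suggests.

```java
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.IntNHash;

public class IntNHashExample {
    public static void main(String[] args) {
        // Map 2-int composite keys (e.g. hypothetical (shardId, bucketOrd) pairs) to dense ids.
        try (IntNHash hash = new IntNHash(16, 2, BigArrays.NON_RECYCLING_INSTANCE)) {
            long id1 = hash.add(new int[] { 7, 42 });    // newly allocated id, >= 0
            long dup = hash.add(new int[] { 7, 42 });    // already present: encodes as -1 - id1
            assert dup == -1 - id1;
            long found = hash.find(new int[] { 7, 42 }); // == id1; would be -1 if absent
            int[] keys = hash.getKeys(id1);              // shared scratch buffer: {7, 42}
            System.out.println(found + " -> [" + keys[0] + ", " + keys[1] + "]");
        }
    }
}
```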

server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java

Lines changed: 1 addition & 1 deletion
@@ -479,7 +479,7 @@ public static BytesRef toBytesRef(BytesRef scratch, String v) {
     }

     /**
-     * Build a {@link LeafIteratorLookup} which checks for norms of a text field.
+     * Build a {@link LeafIteratorLookup} which matches all documents in a segment
      */
     public static LeafIteratorLookup lookupMatchingAll() {
         return new LeafIteratorLookup() {

server/src/main/java/org/elasticsearch/ingest/IngestService.java

Lines changed: 1 addition & 36 deletions
@@ -52,7 +52,6 @@
 import org.elasticsearch.cluster.service.MasterServiceTaskQueue;
 import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.TriConsumer;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.logging.DeprecationCategory;
 import org.elasticsearch.common.logging.DeprecationLogger;
@@ -1391,45 +1390,11 @@ private void attemptToSampleData(
              * We need both the original document and the fully updated document for sampling, so we make a copy of the original
              * before overwriting it here. We can discard it after sampling.
              */
-            samplingService.maybeSample(projectMetadata, indexRequest.index(), () -> {
-                IndexRequest original = copyIndexRequestForSampling(indexRequest);
-                updateIndexRequestMetadata(original, originalDocumentMetadata);
-                return original;
-            }, ingestDocument);
+            samplingService.maybeSample(projectMetadata, originalDocumentMetadata.getIndex(), indexRequest, ingestDocument);

         }
     }

-    /**
-     * Creates a copy of an IndexRequest to be used by random sampling.
-     * @param original The IndexRequest to be copied
-     * @return A copy of the IndexRequest
-     */
-    private IndexRequest copyIndexRequestForSampling(IndexRequest original) {
-        IndexRequest clonedRequest = new IndexRequest(original.index());
-        clonedRequest.id(original.id());
-        clonedRequest.routing(original.routing());
-        clonedRequest.version(original.version());
-        clonedRequest.versionType(original.versionType());
-        clonedRequest.setPipeline(original.getPipeline());
-        clonedRequest.setFinalPipeline(original.getFinalPipeline());
-        clonedRequest.setIfSeqNo(original.ifSeqNo());
-        clonedRequest.setIfPrimaryTerm(original.ifPrimaryTerm());
-        clonedRequest.setRefreshPolicy(original.getRefreshPolicy());
-        clonedRequest.waitForActiveShards(original.waitForActiveShards());
-        clonedRequest.timeout(original.timeout());
-        clonedRequest.opType(original.opType());
-        clonedRequest.setParentTask(original.getParentTask());
-        clonedRequest.setRequireDataStream(original.isRequireDataStream());
-        clonedRequest.setRequireAlias(original.isRequireAlias());
-        clonedRequest.setIncludeSourceOnError(original.getIncludeSourceOnError());
-        BytesReference source = original.source();
-        if (source != null) {
-            clonedRequest.source(source, original.getContentType());
-        }
-        return clonedRequest;
-    }
-
     private static void executePipeline(
         final IngestDocument ingestDocument,
         final Pipeline pipeline,