
Commit 7904057

Merge remote-tracking branch 'upstream/main' into enh/sort_before_ijoin

2 parents e0407f8 + d78c7e6

21 files changed: +1383 −26 lines


docs/changelog/137395.yaml

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+pr: 137395
+summary: Fix attribute only in full text function not found
+area: ES|QL
+type: bug
+issues:
+ - 137396

docs/changelog/137476.yaml

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+pr: 137476
+summary: Handle missing geotile buckets
+area: Transform
+type: bug
+issues:
+ - 126591

libs/tdigest/src/test/java/org/elasticsearch/tdigest/BigCountTests.java

Lines changed: 17 additions & 3 deletions
@@ -21,10 +21,12 @@

 package org.elasticsearch.tdigest;

+import static org.hamcrest.Matchers.lessThanOrEqualTo;
+
 public abstract class BigCountTests extends TDigestTestCase {

     public void testBigMerge() {
-        try (TDigest digest = createDigest()) {
+        try (TDigest digest = createDigest(100)) {
             for (int i = 0; i < 5; i++) {
                 try (TDigest digestToMerge = getDigest()) {
                     digest.add(digestToMerge);
@@ -35,13 +37,25 @@ public void testBigMerge() {
         }
     }

+    /**
+     * Verify that, at a range of compression values, the size of the produced digest is not much larger than 10 times the compression
+     */
+    public void testCompression() {
+        for (int compression : new int[] { 100, 500, 1000, 10000 }) {
+            try (TDigest digest = createDigest(compression)) {
+                addData(digest);
+                assertThat("Compression = " + compression, digest.centroidCount(), lessThanOrEqualTo(compression * 10));
+            }
+        }
+    }
+
     private TDigest getDigest() {
-        TDigest digest = createDigest();
+        TDigest digest = createDigest(100);
         addData(digest);
         return digest;
     }

-    public TDigest createDigest() {
+    public TDigest createDigest(int compression) {
         throw new IllegalStateException("Should have over-ridden createDigest");
     }

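The new testCompression encodes the key t-digest size guarantee: a digest built with compression parameter c retains on the order of c centroids no matter how many values it absorbs, so centroidCount() is asserted to stay within 10 × compression. Below is a minimal, self-contained toy digest illustrating that bounded-centroid idea; it is a sketch, not this library's TDigest (whose merge rule sizes centroids by a quantile scale function, and whose construction goes through the test fixture's arrays() helper):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Toy digest: buffers incoming values and, on overflow, merges everything
// down to at most `compression` equal-weight centroids. This illustrates
// the invariant the test asserts (centroid count stays O(compression));
// the real t-digest instead sizes centroids by a quantile scale function
// so the tails keep more resolution.
final class ToyDigest {
    private final int compression;
    private final List<Double> buffer = new ArrayList<>();
    private double[] means = new double[0];
    private long[] counts = new long[0];

    ToyDigest(int compression) {
        this.compression = compression;
    }

    void add(double x) {
        buffer.add(x);
        if (buffer.size() >= compression * 10) {
            compress();
        }
    }

    int centroidCount() {
        compress(); // fold any buffered values first
        return means.length;
    }

    private void compress() {
        record Pt(double mean, long count) {}
        List<Pt> pts = new ArrayList<>();
        for (int i = 0; i < means.length; i++) {
            pts.add(new Pt(means[i], counts[i]));
        }
        for (double x : buffer) {
            pts.add(new Pt(x, 1));
        }
        buffer.clear();
        if (pts.isEmpty()) {
            return;
        }
        pts.sort((a, b) -> Double.compare(a.mean(), b.mean()));
        // Greedily group sorted points so at most `compression` centroids remain.
        int target = Math.min(compression, pts.size());
        int per = (pts.size() + target - 1) / target; // ceil(points / target)
        double[] newMeans = new double[target];
        long[] newCounts = new long[target];
        int out = 0;
        for (int i = 0; i < pts.size(); i += per) {
            double weightedSum = 0;
            long count = 0;
            for (int j = i; j < Math.min(i + per, pts.size()); j++) {
                weightedSum += pts.get(j).mean() * pts.get(j).count();
                count += pts.get(j).count();
            }
            newMeans[out] = weightedSum / count; // count-weighted centroid mean
            newCounts[out++] = count;
        }
        means = Arrays.copyOf(newMeans, out);
        counts = Arrays.copyOf(newCounts, out);
    }

    public static void main(String[] args) {
        // Mirrors the shape of the new test's assertion at one compression value.
        ToyDigest digest = new ToyDigest(100);
        for (int i = 0; i < 1_000_000; i++) {
            digest.add(Math.random());
        }
        assert digest.centroidCount() <= 100 * 10;
        System.out.println("centroids: " + digest.centroidCount());
    }
}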

libs/tdigest/src/test/java/org/elasticsearch/tdigest/BigCountTestsMergingDigestTests.java

Lines changed: 2 additions & 2 deletions
@@ -23,7 +23,7 @@

 public class BigCountTestsMergingDigestTests extends BigCountTests {
     @Override
-    public TDigest createDigest() {
-        return TDigest.createMergingDigest(arrays(), 100);
+    public TDigest createDigest(int compression) {
+        return TDigest.createMergingDigest(arrays(), compression);
     }
 }

libs/tdigest/src/test/java/org/elasticsearch/tdigest/BigCountTestsTreeDigestTests.java

Lines changed: 2 additions & 2 deletions
@@ -23,7 +23,7 @@

 public class BigCountTestsTreeDigestTests extends BigCountTests {
     @Override
-    public TDigest createDigest() {
-        return TDigest.createAvlTreeDigest(arrays(), 100);
+    public TDigest createDigest(int compression) {
+        return TDigest.createAvlTreeDigest(arrays(), compression);
     }
 }

server/src/main/java/org/elasticsearch/index/codec/vectors/es93/DirectIOCapableLucene99FlatVectorsFormat.java

Lines changed: 11 additions & 8 deletions
@@ -266,22 +266,25 @@ public void nextDocsAndScores(int nextCount, Bits liveDocs, DocAndFloatFeatureBu
                 buffer.docs[size++] = indexIterator.index();
             }
         }
-        int loopBound = size - (size % bulkSize);
+        final int firstBulkSize = Math.min(bulkSize, size);
+        for (int j = 0; j < firstBulkSize; j++) {
+            final long ord = buffer.docs[j];
+            inputSlice.prefetch(ord * byteSize, byteSize);
+        }
+        final int loopBound = size - (size % bulkSize);
         int i = 0;
         for (; i < loopBound; i += bulkSize) {
-            for (int j = 0; j < bulkSize; j++) {
-                long ord = buffer.docs[i + j];
+            final int nextI = i + bulkSize;
+            final int nextBulkSize = Math.min(bulkSize, size - nextI);
+            for (int j = 0; j < nextBulkSize; j++) {
+                final long ord = buffer.docs[nextI + j];
                 inputSlice.prefetch(ord * byteSize, byteSize);
             }
             System.arraycopy(buffer.docs, i, docBuffer, 0, bulkSize);
             inner.bulkScore(docBuffer, scoreBuffer, bulkSize);
             System.arraycopy(scoreBuffer, 0, buffer.features, i, bulkSize);
         }
-        int countLeft = size - i;
-        for (int j = i; j < size; j++) {
-            long ord = buffer.docs[j];
-            inputSlice.prefetch(ord * byteSize, byteSize);
-        }
+        final int countLeft = size - i;
         System.arraycopy(buffer.docs, i, docBuffer, 0, countLeft);
         inner.bulkScore(docBuffer, scoreBuffer, countLeft);
         System.arraycopy(scoreBuffer, 0, buffer.features, i, countLeft);
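
This rewrite is a software-pipelining change rather than a behavioral one: the old loop prefetched each block immediately before scoring it (and prefetched the tail without overlapping it with any work), whereas the new loop warms the first block up front and then, inside the loop, issues prefetches for block i+1 before scoring block i, so the reads for the next block are in flight while the current block is being scored. A minimal sketch of the pattern, with hypothetical prefetch/scoreBlock stand-ins for inputSlice.prefetch(...) and inner.bulkScore(...):

// Hypothetical stand-ins for the real calls in this diff:
// inputSlice.prefetch(ord * byteSize, byteSize) and inner.bulkScore(...).
final class PipelinedScoring {
    static void prefetch(long ord) { /* hint the OS/page cache to read ahead */ }
    static void scoreBlock(long[] ords, int from, int len) { /* CPU-bound scoring */ }

    static void scoreAllPipelined(long[] ords, int bulkSize) {
        int size = ords.length;
        // Kick off I/O for the first block before entering the loop.
        int firstBulk = Math.min(bulkSize, size);
        for (int j = 0; j < firstBulk; j++) {
            prefetch(ords[j]);
        }
        int loopBound = size - (size % bulkSize);
        int i = 0;
        for (; i < loopBound; i += bulkSize) {
            // Prefetch block i+1 (possibly a short tail), then score block i:
            // the next block's reads overlap with this block's scoring work.
            int next = i + bulkSize;
            int nextBulk = Math.min(bulkSize, size - next);
            for (int j = 0; j < nextBulk; j++) {
                prefetch(ords[next + j]);
            }
            scoreBlock(ords, i, bulkSize);
        }
        scoreBlock(ords, i, size - i); // tail: already prefetched by the last iteration
    }
}

The Math.min guards matter: the first block may be shorter than bulkSize when few docs match, and the "next" block prefetched in the final loop iteration is the tail, which is why the separate tail-prefetch loop could be deleted.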

server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java

Lines changed: 1 addition & 1 deletion
@@ -42,7 +42,7 @@ public class TDigestState implements Releasable, Accountable {
     private final TDigest tdigest;

     // Supported tdigest types.
-    protected enum Type {
+    public enum Type {
         HYBRID,
         AVL_TREE,
         MERGING,

server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java

Lines changed: 16 additions & 8 deletions
@@ -10,7 +10,6 @@

 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.KeywordField;
-import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
@@ -223,18 +222,27 @@ public void testFielddataLookupTerminatesInLoop() {
     }

     public void testFielddataLookupSometimesLoop() throws IOException {
-        SearchExecutionContext searchExecutionContext = createSearchExecutionContext(
-            // simulate a runtime field cycle in the second doc: 1: doc['2'] 2: doc['3'] 3: doc['4'] 4: doc['4']
+        // create this field so we can use it to make sure we're escaping the loop on only the "first" document
+        var concreteField = new KeywordFieldMapper.KeywordFieldType("indexed_field", true, true, Collections.emptyMap());
+
+        // simulate a runtime field cycle in the second doc: 1: doc['2'] 2: doc['3'] 3: doc['4'] 4: doc['4']
+        var runtimeFields = List.of(
             runtimeField("1", leafLookup -> leafLookup.doc().get("2").get(0).toString()),
             runtimeField("2", leafLookup -> leafLookup.doc().get("3").get(0).toString()),
             runtimeField("3", leafLookup -> leafLookup.doc().get("4").get(0).toString()),
-            runtimeField("4", (leafLookup, docId) -> {
-                if (docId == 0) {
+            runtimeField("4", leafLookup -> {
+                if (leafLookup.doc().get("indexed_field").getFirst().equals("first")) {
                     return "escape!";
                 }
-                return leafLookup.doc().get("4").get(0).toString();
+                return leafLookup.doc().get("4").getFirst().toString();
             })
         );
+        SearchExecutionContext searchExecutionContext = createSearchExecutionContext(
+            "uuid",
+            null,
+            createMappingLookup(List.of(concreteField), runtimeFields),
+            Collections.emptyMap()
+        );
         List<String> values = collect("1", searchExecutionContext, new TermQuery(new Term("indexed_field", "first")));
         assertEquals(List.of("escape!"), values);
         IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> collect("1", searchExecutionContext));
@@ -776,8 +784,8 @@ private static List<String> collect(String field, SearchE
     private static List<String> collect(String field, SearchExecutionContext searchExecutionContext, Query query) throws IOException {
         List<String> result = new ArrayList<>();
         try (Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
-            indexWriter.addDocument(List.of(new StringField("indexed_field", "first", Field.Store.NO)));
-            indexWriter.addDocument(List.of(new StringField("indexed_field", "second", Field.Store.NO)));
+            indexWriter.addDocument(List.of(new KeywordField("indexed_field", "first", Field.Store.YES)));
+            indexWriter.addDocument(List.of(new KeywordField("indexed_field", "second", Field.Store.YES)));
             try (DirectoryReader reader = indexWriter.getReader()) {
                 IndexSearcher searcher = newSearcher(reader);
                 MappedFieldType fieldType = searchExecutionContext.getFieldType(field);

x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsPlugin.java

Lines changed: 9 additions & 0 deletions
@@ -23,6 +23,7 @@
 import org.elasticsearch.xpack.analytics.cumulativecardinality.CumulativeCardinalityPipelineAggregationBuilder;
 import org.elasticsearch.xpack.analytics.cumulativecardinality.InternalSimpleLongValue;
 import org.elasticsearch.xpack.analytics.mapper.HistogramFieldMapper;
+import org.elasticsearch.xpack.analytics.mapper.TDigestFieldMapper;
 import org.elasticsearch.xpack.analytics.movingPercentiles.MovingPercentilesPipelineAggregationBuilder;
 import org.elasticsearch.xpack.analytics.multiterms.InternalMultiTerms;
 import org.elasticsearch.xpack.analytics.multiterms.MultiTermsAggregationBuilder;
@@ -140,6 +141,14 @@ public List<Setting<?>> getSettings() {

     @Override
     public Map<String, Mapper.TypeParser> getMappers() {
+        if (TDigestFieldMapper.TDIGEST_FIELD_MAPPER.isEnabled()) {
+            return Map.of(
+                HistogramFieldMapper.CONTENT_TYPE,
+                HistogramFieldMapper.PARSER,
+                TDigestFieldMapper.CONTENT_TYPE,
+                TDigestFieldMapper.PARSER
+            );
+        }
         return Map.of(HistogramFieldMapper.CONTENT_TYPE, HistogramFieldMapper.PARSER);
     }

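The getMappers change gates the new mapper behind a feature flag: when the flag is off, the tdigest type is simply never registered, so flag-off nodes reject mappings that use it rather than half-supporting them. A minimal, self-contained sketch of the same registration pattern; TypeParser here is a simplified stand-in for Mapper.TypeParser, and the "tdigest" content-type string is an assumption, not taken from this diff:

import java.util.Map;

// Simplified stand-ins for Mapper.TypeParser and the feature-flag check.
interface TypeParser {}

final class MapperRegistration {
    static final TypeParser HISTOGRAM_PARSER = new TypeParser() {};
    static final TypeParser TDIGEST_PARSER = new TypeParser() {};

    // Mirrors the shape of AnalyticsPlugin#getMappers: register the
    // experimental type only when its feature flag is enabled.
    // `tdigestFlagEnabled` stands in for TDIGEST_FIELD_MAPPER.isEnabled().
    static Map<String, TypeParser> getMappers(boolean tdigestFlagEnabled) {
        if (tdigestFlagEnabled) {
            return Map.of("histogram", HISTOGRAM_PARSER, "tdigest", TDIGEST_PARSER);
        }
        return Map.of("histogram", HISTOGRAM_PARSER);
    }
}

Gating at registration time keeps the decision in one place: everything downstream (mapping parsing, field type lookup) sees a flag-off cluster as if the type did not exist at all.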

x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/ExponentialHistogramParser.java

Lines changed: 1 addition & 1 deletion
@@ -113,7 +113,7 @@ private record ParsedZeroBucket(long count, double threshold) {

     /**
      * Parses an XContent object into an exponential histogram.
-     * The parse is expected to point at the next token after {@link XContentParser.Token#START_OBJECT}.
+     * The parser is expected to point at the next token after {@link XContentParser.Token#START_OBJECT}.
      *
      * @param mappedFieldName the name of the field being parsed, used for error messages
      * @param parser the parser to use
