Commit daa69e7

Bugfix

1 parent d849ff9 commit daa69e7

4 files changed, +18 -15 lines changed

test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java

Lines changed: 1 addition & 1 deletion

@@ -758,7 +758,7 @@ public void testLookupExplosionExpression() throws IOException {
     public void testLookupExplosionManyFieldsExpression() throws IOException {
         int sensorDataCount = 400;
         int lookupEntries = 1000;
-        int joinFieldsCount = 990;
+        int joinFieldsCount = 399;// only join on 399 columns due to max expression size of 400
         Map<?, ?> map = lookupExplosion(sensorDataCount, lookupEntries, joinFieldsCount, lookupEntries, true);
         assertMap(map, matchesMap().extraOk().entry("values", List.of(List.of(sensorDataCount * lookupEntries))));
     }

x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java

Lines changed: 7 additions & 4 deletions

@@ -7,6 +7,7 @@
 
 package org.elasticsearch.xpack.esql.plan.physical;
 
+import org.elasticsearch.TransportVersions;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -54,8 +55,9 @@ public HashJoinExec(
 
     private HashJoinExec(StreamInput in) throws IOException {
         super(Source.readFrom((PlanStreamInput) in), in.readNamedWriteable(PhysicalPlan.class), in.readNamedWriteable(PhysicalPlan.class));
-        // TODO: clean up, we used to read the match fields here.
-        in.readNamedWriteableCollectionAsList(Attribute.class);
+        if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_LOOKUP_JOIN_ON_EXPRESSION) == false) {
+            in.readNamedWriteableCollectionAsList(Attribute.class);
+        }
         this.leftFields = in.readNamedWriteableCollectionAsList(Attribute.class);
         this.rightFields = in.readNamedWriteableCollectionAsList(Attribute.class);
         this.addedFields = in.readNamedWriteableCollectionAsList(Attribute.class);
@@ -64,8 +66,9 @@ private HashJoinExec(StreamInput in) throws IOException {
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
-        // TODO: clean up, we used to read the match fields here.
-        out.writeNamedWriteableCollection(leftFields);
+        if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_LOOKUP_JOIN_ON_EXPRESSION) == false) {
+            out.writeNamedWriteableCollection(leftFields);
+        }
         out.writeNamedWriteableCollection(leftFields);
         out.writeNamedWriteableCollection(rightFields);
         out.writeNamedWriteableCollection(addedFields);
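
Note on the change above: on the read side, peers before ESQL_LOOKUP_JOIN_ON_EXPRESSION still send the legacy match-fields collection, so it must be consumed and discarded; on the write side, a placeholder collection (leftFields) must still be emitted for those older peers so both sides agree on the wire layout. Below is a minimal, self-contained sketch of this version-gating pattern using plain java.io streams; the class name, the version constants, and the writeStrings/readStrings helpers are made up for illustration and are not the real HashJoinExec or TransportVersions code.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class VersionGatedWireFormat {
    // Hypothetical version constants standing in for TransportVersions entries.
    static final int BEFORE_EXPRESSION_JOIN = 1; // peers that still expect the legacy match-fields list
    static final int EXPRESSION_JOIN = 2;        // peers on or after the new version

    // Writer: older peers expect an extra (legacy) collection before the real fields.
    static void write(DataOutputStream out, int peerVersion, List<String> leftFields) throws IOException {
        if (peerVersion < EXPRESSION_JOIN) {
            writeStrings(out, leftFields); // placeholder for the removed match fields
        }
        writeStrings(out, leftFields);
    }

    // Reader: mirrors the writer, so both sides read exactly what was written.
    static List<String> read(DataInputStream in, int peerVersion) throws IOException {
        if (peerVersion < EXPRESSION_JOIN) {
            readStrings(in); // consume and discard the legacy collection
        }
        return readStrings(in);
    }

    static void writeStrings(DataOutputStream out, List<String> values) throws IOException {
        out.writeInt(values.size());
        for (String value : values) {
            out.writeUTF(value);
        }
    }

    static List<String> readStrings(DataInputStream in) throws IOException {
        int size = in.readInt();
        List<String> values = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            values.add(in.readUTF());
        }
        return values;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        write(new DataOutputStream(bytes), BEFORE_EXPRESSION_JOIN, List.of("field_a", "field_b"));
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
        System.out.println(read(in, BEFORE_EXPRESSION_JOIN)); // [field_a, field_b]
    }
}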

x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexOperatorTests.java

Lines changed: 8 additions & 8 deletions

@@ -91,6 +91,7 @@
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.stream.LongStream;
@@ -113,7 +114,10 @@ public static Iterable<Object[]> parametersFactory() {
         List<Object[]> operations = new ArrayList<>();
         operations.add(new Object[] { null });
         for (EsqlBinaryComparison.BinaryComparisonOperation operation : EsqlBinaryComparison.BinaryComparisonOperation.values()) {
-            operations.add(new Object[] { operation });
+            // we skip NEQ because there are too many matches and the test can timeout
+            if (operation != EsqlBinaryComparison.BinaryComparisonOperation.NEQ) {
+                operations.add(new Object[] { operation });
+            }
         }
         return operations;
     }
@@ -157,10 +161,6 @@ protected SourceOperator simpleInput(BlockFactory blockFactory, int size) {
 
     @Override
     protected void assertSimpleOutput(List<Page> input, List<Page> results) {
-        if (operation == EsqlBinaryComparison.BinaryComparisonOperation.NEQ) {
-            // if not equal there are too many matches and we get OOMEs when validating the output
-            return;
-        }
         /*
          * We've configured there to be just a single result per input so the total
          * row count is the same. But lookup cuts into pages of length 256 so the
@@ -448,12 +448,12 @@ private AbstractLookupService.LookupShardContextFactory lookupShardContextFactor
         };
         String suffix = (operation == null) ? "" : ("_right");
         StringBuilder props = new StringBuilder();
-        props.append(String.format("\"match0%s\": { \"type\": \"long\" }", suffix));
+        props.append(String.format(Locale.ROOT, "\"match0%s\": { \"type\": \"long\" }", suffix));
         if (numberOfJoinColumns == 2) {
-            props.append(String.format(", \"match1%s\": { \"type\": \"long\" }", suffix));
+            props.append(String.format(Locale.ROOT, ", \"match1%s\": { \"type\": \"long\" }", suffix));
         }
         props.append(", \"lkwd\": { \"type\": \"keyword\" }, \"lint\": { \"type\": \"integer\" }");
-        String mapping = String.format("{\n \"doc\": { \"properties\": { %s } }\n}", props.toString());
+        String mapping = String.format(Locale.ROOT, "{\n \"doc\": { \"properties\": { %s } }\n}", props.toString());
         MapperService mapperService = mapperHelper.createMapperService(mapping);
         DirectoryReader reader = DirectoryReader.open(lookupIndexDirectory);
         SearchExecutionContext executionCtx = mapperHelper.createSearchExecutionContext(mapperService, newSearcher(reader));
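
The last hunk threads Locale.ROOT through every String.format call. The real test only formats strings, where the locale has no visible effect, but passing an explicit locale guards against locale-dependent output in general. A small standalone sketch of the kind of problem this avoids; the class name and the %f/"boost" example are assumptions for illustration only, not taken from the test:

import java.util.Locale;

public class LocaleSensitiveFormat {
    public static void main(String[] args) {
        double boost = 3.5;
        // Locale.ROOT always uses '.' as the decimal separator, so the output stays valid JSON.
        System.out.println(String.format(Locale.ROOT, "{ \"boost\": %f }", boost));    // { "boost": 3.500000 }
        // A locale-dependent format can silently change the separator to ',', breaking JSON parsing.
        System.out.println(String.format(Locale.GERMANY, "{ \"boost\": %f }", boost)); // { "boost": 3,500000 }
    }
}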

x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java

Lines changed: 2 additions & 2 deletions

@@ -6803,7 +6803,7 @@ public void testReplaceStringCasingWithInsensitiveEqualsLowerTrue() {
     public void testReplaceStringCasingWithInsensitiveEqualsEquals() {
         for (var fn : List.of("TO_LOWER", "TO_UPPER")) {
             var value = fn.equals("TO_LOWER") ? fn.toLowerCase(Locale.ROOT) : fn.toUpperCase(Locale.ROOT);
-            value += "�✈��"; // these should not cause folding, they're not in the upper/lower char class
+            value += "🐔✈🔥🎉"; // these should not cause folding, they're not in the upper/lower char class
             var plan = optimizedPlan("FROM test | WHERE " + fn + "(first_name) == \"" + value + "\"");
             var limit = as(plan, Limit.class);
             var filter = as(limit.child(), Filter.class);
@@ -6818,7 +6818,7 @@ public void testReplaceStringCasingWithInsensitiveEqualsEquals() {
     public void testReplaceStringCasingWithInsensitiveEqualsNotEquals() {
         for (var fn : List.of("TO_LOWER", "TO_UPPER")) {
             var value = fn.equals("TO_LOWER") ? fn.toLowerCase(Locale.ROOT) : fn.toUpperCase(Locale.ROOT);
-            value += "�✈��"; // these should not cause folding, they're not in the upper/lower char class
+            value += "🐔✈🔥🎉"; // these should not cause folding, they're not in the upper/lower char class
             var plan = optimizedPlan("FROM test | WHERE " + fn + "(first_name) != \"" + value + "\"");
             var limit = as(plan, Limit.class);
             var filter = as(limit.child(), Filter.class);
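
The comment in the changed lines says the appended characters "should not cause folding" because they are not in the upper/lower char class. A quick standalone check (illustrative only, not part of the test suite) that Java case conversion indeed leaves these emoji unchanged:

import java.util.Locale;

public class EmojiCaseFolding {
    public static void main(String[] args) {
        String value = "🐔✈🔥🎉";
        // Emoji have no upper- or lower-case mappings, so both conversions are no-ops.
        System.out.println(value.equals(value.toLowerCase(Locale.ROOT))); // true
        System.out.println(value.equals(value.toUpperCase(Locale.ROOT))); // true
    }
}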
