Merged

45 commits
b1ee80e
Lookup Join on Multiple Columns POC WIP
julian-elastic Jul 18, 2025
2c90817
Update docs/changelog/131559.yaml
julian-elastic Jul 18, 2025
4db37fd
Lookup join on multiple fields WIP
julian-elastic Jul 21, 2025
ef894f3
Merge branch 'main' into lookupJoin
julian-elastic Jul 21, 2025
e367e8c
Fix more UTs
julian-elastic Jul 21, 2025
744479f
Bugfixes
julian-elastic Jul 21, 2025
c17c993
Bugfixes
julian-elastic Jul 21, 2025
d9462cc
Fix serialization error
julian-elastic Jul 21, 2025
a17fa88
Fix UT error
julian-elastic Jul 22, 2025
3fd48e4
Add more test datasets
julian-elastic Jul 22, 2025
64a07c7
Add UTs for join on 2,3,4 columns
julian-elastic Jul 22, 2025
ea171b1
Merge branch 'main' into lookupJoin
julian-elastic Jul 22, 2025
bae7007
Add handling for remote not supporting LOOKUP JOIN on multiple fields
julian-elastic Jul 23, 2025
6bd2937
Merge branch 'main' into lookupJoin
julian-elastic Jul 23, 2025
71adaa8
Change documentation
julian-elastic Jul 23, 2025
43aa7e1
Fix docs
julian-elastic Jul 24, 2025
7e0d8d7
Add more UTs
julian-elastic Jul 25, 2025
b6c615c
Merge branch 'main' into lookupJoin
julian-elastic Jul 25, 2025
5d9f68f
Address code review feedback
julian-elastic Jul 29, 2025
fc1c63b
Merge branch 'main' into lookupJoin
julian-elastic Jul 29, 2025
c179cea
Add Generative tests for Lookup Join On Multiple Columns
julian-elastic Jul 29, 2025
be2ce94
Merge branch 'main' into lookupJoin
julian-elastic Jul 29, 2025
59c16d9
Remove debugging code
julian-elastic Jul 29, 2025
dd52c02
Address a rare issue in Generative tests
julian-elastic Jul 30, 2025
8b2594b
Address docs issues
julian-elastic Jul 30, 2025
606c099
Merge branch 'main' into lookupJoin
julian-elastic Jul 30, 2025
e585342
More docs changes
julian-elastic Jul 30, 2025
72c3ad7
Merge branch 'main' into lookupJoin
julian-elastic Jul 30, 2025
f742160
Address code review feedback
julian-elastic Jul 30, 2025
ed6946b
Enhance LookupFromIndexIT
julian-elastic Jul 30, 2025
806933f
Fix failing UT
julian-elastic Jul 31, 2025
62956af
Merge branch 'main' into lookupJoin
julian-elastic Jul 31, 2025
326cb82
Address more code review comments
julian-elastic Jul 31, 2025
1dbe524
Address more code review comments, part 2
julian-elastic Jul 31, 2025
00b41ed
MatchConfig refactoring and add serialization test
julian-elastic Jul 31, 2025
a036aa6
bugfix
julian-elastic Jul 31, 2025
28d0c7c
Merge branch 'main' into lookupJoin
julian-elastic Jul 31, 2025
ce73957
Add HeapAttackIT cases with join on multiple fields
julian-elastic Aug 1, 2025
cfdb440
Merge branch 'main' into lookupJoin
julian-elastic Aug 1, 2025
96a6891
bugfix
julian-elastic Aug 1, 2025
d90799d
Update docs/changelog/131559.yaml
julian-elastic Aug 12, 2025
ec7f10a
Merge branch 'main' into lookupJoin
julian-elastic Aug 12, 2025
1cf34bc
Address code review comments
julian-elastic Aug 12, 2025
6acb8ef
Merge branch 'main' into lookupJoin
julian-elastic Aug 13, 2025
1102c1b
fix issue with docs
julian-elastic Aug 13, 2025
@@ -13,6 +13,7 @@
import org.apache.http.client.config.RequestConfig;
import org.apache.http.util.EntityUtils;
import org.apache.lucene.tests.util.TimeUnits;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
@@ -697,13 +698,26 @@ private Map<String, Object> fetchMvLongs() throws IOException {
public void testLookupExplosion() throws IOException {
int sensorDataCount = 400;
int lookupEntries = 10000;
Map<?, ?> map = lookupExplosion(sensorDataCount, lookupEntries);
Map<?, ?> map = lookupExplosion(sensorDataCount, lookupEntries, 1);
assertMap(map, matchesMap().extraOk().entry("values", List.of(List.of(sensorDataCount * lookupEntries))));
}

public void testLookupExplosionManyFields() throws IOException {
int sensorDataCount = 400;
int lookupEntries = 1000;
int joinFieldsCount = 990;
Map<?, ?> map = lookupExplosion(sensorDataCount, lookupEntries, joinFieldsCount);
assertMap(map, matchesMap().extraOk().entry("values", List.of(List.of(sensorDataCount * lookupEntries))));
}

public void testLookupExplosionManyMatchesManyFields() throws IOException {
// 1500, 10000 is enough locally, but some CI machines need more.
assertCircuitBreaks(attempt -> lookupExplosion(attempt * 1500, 10000, 30));
}

public void testLookupExplosionManyMatches() throws IOException {
// 1500, 10000 is enough locally, but some CI machines need more.
assertCircuitBreaks(attempt -> lookupExplosion(attempt * 1500, 10000));
assertCircuitBreaks(attempt -> lookupExplosion(attempt * 1500, 10000, 1));
}

public void testLookupExplosionNoFetch() throws IOException {
@@ -730,11 +744,18 @@ public void testLookupExplosionBigStringManyMatches() throws IOException {
assertCircuitBreaks(attempt -> lookupExplosionBigString(attempt * 500, 1));
}

private Map<String, Object> lookupExplosion(int sensorDataCount, int lookupEntries) throws IOException {
private Map<String, Object> lookupExplosion(int sensorDataCount, int lookupEntries, int joinFieldsCount) throws IOException {
try {
lookupExplosionData(sensorDataCount, lookupEntries);
lookupExplosionData(sensorDataCount, lookupEntries, joinFieldsCount);
StringBuilder query = startQuery();
query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(location)\"}");
query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON ");
for (int i = 0; i < joinFieldsCount; i++) {
if (i != 0) {
query.append(",");
}
query.append("id").append(i);
}
query.append(" | STATS COUNT(location)\"}");
return responseAsMap(query(query.toString(), null));
} finally {
deleteIndex("sensor_data");
@@ -744,24 +765,24 @@ private Map<String, Object> lookupExplosion(int sensorDataCount, int lookupEntries) throws IOException {

private Map<String, Object> lookupExplosionNoFetch(int sensorDataCount, int lookupEntries) throws IOException {
try {
lookupExplosionData(sensorDataCount, lookupEntries);
lookupExplosionData(sensorDataCount, lookupEntries, 1);
StringBuilder query = startQuery();
query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(*)\"}");
query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id0 | STATS COUNT(*)\"}");
return responseAsMap(query(query.toString(), null));
} finally {
deleteIndex("sensor_data");
deleteIndex("sensor_lookup");
}
}

private void lookupExplosionData(int sensorDataCount, int lookupEntries) throws IOException {
initSensorData(sensorDataCount, 1);
initSensorLookup(lookupEntries, 1, i -> "73.9857 40.7484");
private void lookupExplosionData(int sensorDataCount, int lookupEntries, int joinFieldCount) throws IOException {
initSensorData(sensorDataCount, 1, joinFieldCount);
initSensorLookup(lookupEntries, 1, i -> "73.9857 40.7484", joinFieldCount);
}

private Map<String, Object> lookupExplosionBigString(int sensorDataCount, int lookupEntries) throws IOException {
try {
initSensorData(sensorDataCount, 1);
initSensorData(sensorDataCount, 1, 1);
initSensorLookupString(lookupEntries, 1, i -> {
int target = Math.toIntExact(ByteSizeValue.ofMb(1).getBytes());
StringBuilder str = new StringBuilder(Math.toIntExact(ByteSizeValue.ofMb(2).getBytes()));
@@ -772,7 +793,7 @@ private Map<String, Object> lookupExplosionBigString(int sensorDataCount, int lookupEntries) throws IOException {
return str.toString();
});
StringBuilder query = startQuery();
query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(string)\"}");
query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id0 | STATS COUNT(string)\"}");
return responseAsMap(query(query.toString(), null));
} finally {
deleteIndex("sensor_data");
@@ -794,11 +815,11 @@ public void testEnrichExplosionManyMatches() throws IOException {

private Map<String, Object> enrichExplosion(int sensorDataCount, int lookupEntries) throws IOException {
try {
initSensorData(sensorDataCount, 1);
initSensorData(sensorDataCount, 1, 1);
initSensorEnrich(lookupEntries, 1, i -> "73.9857 40.7484");
try {
StringBuilder query = startQuery();
query.append("FROM sensor_data | ENRICH sensor ON id | STATS COUNT(*)\"}");
query.append("FROM sensor_data | ENRICH sensor ON id0 | STATS COUNT(*)\"}");
return responseAsMap(query(query.toString(), null));
} finally {
Request delete = new Request("DELETE", "/_enrich/policy/sensor");
@@ -958,25 +979,42 @@ private void initMvLongsIndex(int docs, int fields, int fieldValues) throws IOException {
initIndex("mv_longs", bulk.toString());
}

private void initSensorData(int docCount, int sensorCount) throws IOException {
private void initSensorData(int docCount, int sensorCount, int joinFieldCount) throws IOException {
logger.info("loading sensor data");
createIndex("sensor_data", Settings.builder().put(IndexSettings.MODE.getKey(), IndexMode.LOOKUP.getName()).build(), """
{
"properties": {
"@timestamp": { "type": "date" },
"id": { "type": "long" },
// We cannot go over 1000 fields, because index creation fails when parsing the mappings:
// [sensor_data] java.lang.IllegalArgumentException: Limit of total fields [1000] has been exceeded
assertTrue("Too many join fields; index creation would throw an exception later", joinFieldCount <= 990);
StringBuilder createIndexBuilder = new StringBuilder();
createIndexBuilder.append("""
{
"properties": {
"@timestamp": { "type": "date" },
""");
for (int i = 0; i < joinFieldCount; i++) {
createIndexBuilder.append("\"id").append(i).append("\": { \"type\": \"long\" },");
}
createIndexBuilder.append("""
"value": { "type": "double" }
}
}""");
CreateIndexResponse response = createIndex(
"sensor_data",
Settings.builder().put(IndexSettings.MODE.getKey(), IndexMode.LOOKUP.getName()).build(),
createIndexBuilder.toString()
);
assertTrue(response.isAcknowledged());
int docsPerBulk = 1000;
long firstDate = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2025-01-01T00:00:00Z");

StringBuilder data = new StringBuilder();
for (int i = 0; i < docCount; i++) {
data.append(String.format(Locale.ROOT, """
{"create":{}}
{"timestamp":"%s", "id": %d, "value": %f}
""", DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(i * 10L + firstDate), i % sensorCount, i * 1.1));
{"timestamp":"%s",""", DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(i * 10L + firstDate)));
for (int j = 0; j < joinFieldCount; j++) {
data.append(String.format(Locale.ROOT, "\"id%d\":%d, ", j, i % sensorCount));
}
data.append(String.format(Locale.ROOT, "\"value\": %f}\n", i * 1.1));
if (i % docsPerBulk == docsPerBulk - 1) {
bulk("sensor_data", data.toString());
data.setLength(0);
@@ -985,23 +1023,42 @@ private void initSensorData(int docCount, int sensorCount) throws IOException {
initIndex("sensor_data", data.toString());
}
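To make the generated mapping concrete, this is what the builder above emits for an assumed joinFieldCount of 2, reformatted with whitespace for readability (the builder writes each id property inline with a trailing comma, which the fixed "value" property then closes off). The rendering is illustrative, not part of the diff:

{
  "properties": {
    "@timestamp": { "type": "date" },
    "id0": { "type": "long" },
    "id1": { "type": "long" },
    "value": { "type": "double" }
  }
}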

private void initSensorLookup(int lookupEntries, int sensorCount, IntFunction<String> location) throws IOException {
private void initSensorLookup(int lookupEntries, int sensorCount, IntFunction<String> location, int joinFieldsCount)
throws IOException {
logger.info("loading sensor lookup");
createIndex("sensor_lookup", Settings.builder().put(IndexSettings.MODE.getKey(), IndexMode.LOOKUP.getName()).build(), """
// We cannot go over 1000 fields, because index creation fails when parsing the mappings:
// [sensor_lookup] java.lang.IllegalArgumentException: Limit of total fields [1000] has been exceeded
assertTrue("Too many join fields; index creation would throw an exception later", joinFieldsCount <= 990);
StringBuilder createIndexBuilder = new StringBuilder();
createIndexBuilder.append("""
{
"properties": {
"id": { "type": "long" },
""");
for (int i = 0; i < joinFieldsCount; i++) {
createIndexBuilder.append("\"id").append(i).append("\": { \"type\": \"long\" },");
}
createIndexBuilder.append("""
"location": { "type": "geo_point" }
}
}""");
CreateIndexResponse response = createIndex(
"sensor_lookup",
Settings.builder().put(IndexSettings.MODE.getKey(), IndexMode.LOOKUP.getName()).build(),
createIndexBuilder.toString()
);
assertTrue(response.isAcknowledged());
int docsPerBulk = 1000;
StringBuilder data = new StringBuilder();
for (int i = 0; i < lookupEntries; i++) {
int sensor = i % sensorCount;
data.append(String.format(Locale.ROOT, """
{"create":{}}
{"id": %d, "location": "POINT(%s)"}
""", sensor, location.apply(sensor)));
{"""));
for (int j = 0; j < joinFieldsCount; j++) {
data.append(String.format(Locale.ROOT, "\"id%d\":%d, ", j, sensor));
}
data.append(String.format(Locale.ROOT, """
"location": "POINT(%s)"}\n""", location.apply(sensor)));
if (i % docsPerBulk == docsPerBulk - 1) {
bulk("sensor_lookup", data.toString());
data.setLength(0);
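Again for illustration (not from the diff): with an assumed joinFieldsCount of 2 and sensorCount of 10, lookup entry i = 13 maps to sensor 3, and with the fixed location fixture i -> "73.9857 40.7484" used by lookupExplosionData, the loop emits one bulk action pair per entry of the form:

{"create":{}}
{"id0":3, "id1":3, "location": "POINT(73.9857 40.7484)"}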
@@ -1015,7 +1072,7 @@ private void initSensorLookupString(int lookupEntries, int sensorCount, IntFunction<String> string) throws IOException {
createIndex("sensor_lookup", Settings.builder().put(IndexSettings.MODE.getKey(), IndexMode.LOOKUP.getName()).build(), """
{
"properties": {
"id": { "type": "long" },
"id0": { "type": "long" },
"string": { "type": "text" }
}
}""");
@@ -1025,7 +1082,7 @@ private void initSensorLookupString(int lookupEntries, int sensorCount, IntFunction<String> string) throws IOException {
int sensor = i % sensorCount;
data.append(String.format(Locale.ROOT, """
{"create":{}}
{"id": %d, "string": "%s"}
{"id0": %d, "string": "%s"}
""", sensor, string.apply(sensor)));
if (i % docsPerBulk == docsPerBulk - 1) {
bulk("sensor_lookup", data.toString());
@@ -1036,15 +1093,15 @@ private void initSensorLookupString(int lookupEntries, int sensorCount, IntFunction<String> string) throws IOException {
}

private void initSensorEnrich(int lookupEntries, int sensorCount, IntFunction<String> location) throws IOException {
initSensorLookup(lookupEntries, sensorCount, location);
initSensorLookup(lookupEntries, sensorCount, location, 1);
logger.info("loading sensor enrich");

Request create = new Request("PUT", "/_enrich/policy/sensor");
create.setJsonEntity("""
{
"match": {
"indices": "sensor_lookup",
"match_field": "id",
"match_field": "id0",
"enrich_fields": ["location"]
}
}
@@ -37,7 +37,7 @@ protected Writeable.Writer<T> instanceWriter() {
* Copy the {@link Writeable} by round tripping it through {@linkplain StreamInput} and {@linkplain StreamOutput}.
*/
@Override
protected final T copyInstance(T instance, TransportVersion version) throws IOException {
protected T copyInstance(T instance, TransportVersion version) throws IOException {
return copyInstance(instance, getNamedWriteableRegistry(), instanceWriter(), instanceReader(), version);
}
}
@@ -14,7 +14,7 @@
import org.elasticsearch.compute.data.LongVector;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.SourceOperator;
import org.elasticsearch.compute.operator.TupleLongLongBlockSourceOperator;
import org.elasticsearch.compute.test.TupleLongLongBlockSourceOperator;
import org.elasticsearch.core.Tuple;

import java.util.List;
@@ -15,7 +15,7 @@
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.LongDoubleTupleBlockSourceOperator;
import org.elasticsearch.compute.operator.SourceOperator;
import org.elasticsearch.compute.operator.TupleLongLongBlockSourceOperator;
import org.elasticsearch.compute.test.TupleLongLongBlockSourceOperator;
import org.elasticsearch.core.Tuple;

import java.util.List;
@@ -12,7 +12,7 @@
import org.elasticsearch.compute.data.LongBlock;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.SourceOperator;
import org.elasticsearch.compute.operator.TupleLongLongBlockSourceOperator;
import org.elasticsearch.compute.test.TupleLongLongBlockSourceOperator;
import org.elasticsearch.core.Tuple;

import java.util.List;
@@ -13,7 +13,7 @@
import org.elasticsearch.compute.data.DoubleBlock;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.SourceOperator;
import org.elasticsearch.compute.operator.TupleLongLongBlockSourceOperator;
import org.elasticsearch.compute.test.TupleLongLongBlockSourceOperator;
import org.elasticsearch.core.Tuple;

import java.util.ArrayList;
@@ -12,7 +12,7 @@
import org.elasticsearch.compute.data.LongBlock;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.SourceOperator;
import org.elasticsearch.compute.operator.TupleLongLongBlockSourceOperator;
import org.elasticsearch.compute.test.TupleLongLongBlockSourceOperator;
import org.elasticsearch.core.Tuple;

import java.util.List;
@@ -13,7 +13,7 @@
import org.elasticsearch.compute.data.DoubleBlock;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.SourceOperator;
import org.elasticsearch.compute.operator.TupleLongLongBlockSourceOperator;
import org.elasticsearch.compute.test.TupleLongLongBlockSourceOperator;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.search.aggregations.metrics.TDigestState;
import org.junit.Before;
@@ -12,7 +12,7 @@
import org.elasticsearch.compute.data.LongBlock;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.SourceOperator;
import org.elasticsearch.compute.operator.TupleLongLongBlockSourceOperator;
import org.elasticsearch.compute.test.TupleLongLongBlockSourceOperator;
import org.elasticsearch.core.Tuple;

import java.util.List;
@@ -12,7 +12,7 @@
import org.elasticsearch.compute.data.BlockUtils;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.SourceOperator;
import org.elasticsearch.compute.operator.TupleLongLongBlockSourceOperator;
import org.elasticsearch.compute.test.TupleLongLongBlockSourceOperator;
import org.elasticsearch.core.Tuple;

import java.util.Arrays;
@@ -15,6 +15,7 @@
import org.elasticsearch.compute.operator.EvalOperator.EvalOperatorFactory;
import org.elasticsearch.compute.test.CannedSourceOperator;
import org.elasticsearch.compute.test.OperatorTestCase;
import org.elasticsearch.compute.test.TupleLongLongBlockSourceOperator;
import org.elasticsearch.core.Tuple;
import org.hamcrest.Matcher;

@@ -16,6 +16,7 @@
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.test.CannedSourceOperator;
import org.elasticsearch.compute.test.OperatorTestCase;
import org.elasticsearch.compute.test.TupleLongLongBlockSourceOperator;
import org.elasticsearch.core.Tuple;
import org.hamcrest.Matcher;

@@ -22,6 +22,7 @@
import org.elasticsearch.compute.data.LongBlock;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.test.BlockTestUtils;
import org.elasticsearch.compute.test.TupleLongLongBlockSourceOperator;
import org.elasticsearch.core.Tuple;
import org.hamcrest.Matcher;

@@ -13,6 +13,7 @@
import org.elasticsearch.compute.data.LongBlock;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.test.OperatorTestCase;
import org.elasticsearch.compute.test.TupleLongLongBlockSourceOperator;
import org.elasticsearch.core.Tuple;
import org.hamcrest.Matcher;

@@ -16,6 +16,7 @@
import org.elasticsearch.compute.test.OperatorTestCase;
import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator;
import org.elasticsearch.compute.test.TestBlockFactory;
import org.elasticsearch.compute.test.TupleLongLongBlockSourceOperator;
import org.elasticsearch.core.Tuple;
import org.hamcrest.Matcher;

@@ -11,6 +11,7 @@
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.BlockUtils;
import org.elasticsearch.compute.data.DocBlock;
import org.elasticsearch.compute.test.TupleAbstractBlockSourceOperator;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.index.mapper.BlockLoader;

@@ -30,15 +30,15 @@
import org.elasticsearch.compute.operator.Operator;
import org.elasticsearch.compute.operator.PageConsumerOperator;
import org.elasticsearch.compute.operator.SourceOperator;
import org.elasticsearch.compute.operator.TupleAbstractBlockSourceOperator;
import org.elasticsearch.compute.operator.TupleDocLongBlockSourceOperator;
import org.elasticsearch.compute.operator.TupleLongLongBlockSourceOperator;
import org.elasticsearch.compute.test.CannedSourceOperator;
import org.elasticsearch.compute.test.OperatorTestCase;
import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator;
import org.elasticsearch.compute.test.TestBlockBuilder;
import org.elasticsearch.compute.test.TestBlockFactory;
import org.elasticsearch.compute.test.TestDriverFactory;
import org.elasticsearch.compute.test.TupleAbstractBlockSourceOperator;
import org.elasticsearch.compute.test.TupleLongLongBlockSourceOperator;
import org.elasticsearch.core.RefCounted;
import org.elasticsearch.core.SimpleRefCounted;
import org.elasticsearch.core.Tuple;