Commit 9aae2d5

Create single and multi node IT
1 parent 10d2f48 commit 9aae2d5

File tree

3 files changed: +194 -159 lines changed

KnnSemanticTextIT.java (new file, package org.elasticsearch.xpack.esql.qa.multi_node)

Lines changed: 28 additions & 0 deletions

@@ -0,0 +1,28 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.qa.multi_node;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
+
+import org.elasticsearch.test.TestClustersThreadFilter;
+import org.elasticsearch.test.cluster.ElasticsearchCluster;
+import org.elasticsearch.xpack.esql.qa.rest.KnnSemanticTextTestCase;
+import org.junit.ClassRule;
+
+@ThreadLeakFilters(filters = TestClustersThreadFilter.class)
+public class KnnSemanticTextIT extends KnnSemanticTextTestCase {
+    @ClassRule
+    public static ElasticsearchCluster cluster = Clusters.testCluster(
+        spec -> spec.module("x-pack-inference").plugin("inference-service-test")
+    );
+
+    @Override
+    protected String getTestRestCluster() {
+        return cluster.getHttpAddresses();
+    }
+}

x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/KnnSemanticTextIT.java

Lines changed: 2 additions & 159 deletions

@@ -9,176 +9,19 @@
 
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
 
-import org.elasticsearch.client.Request;
-import org.elasticsearch.client.ResponseException;
 import org.elasticsearch.test.TestClustersThreadFilter;
 import org.elasticsearch.test.cluster.ElasticsearchCluster;
-import org.elasticsearch.test.rest.ESRestTestCase;
-import org.elasticsearch.xpack.esql.AssertWarnings;
-import org.elasticsearch.xpack.esql.CsvTestsDataLoader;
-import org.elasticsearch.xpack.esql.action.EsqlCapabilities;
-import org.elasticsearch.xpack.esql.qa.rest.ProfileLogger;
-import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase;
-import org.junit.After;
-import org.junit.Before;
+import org.elasticsearch.xpack.esql.qa.rest.KnnSemanticTextTestCase;
 import org.junit.ClassRule;
-import org.junit.Rule;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.elasticsearch.rest.RestStatus.BAD_REQUEST;
-import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.requestObjectBuilder;
-import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.runEsqlSync;
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.core.StringContains.containsString;
 
 @ThreadLeakFilters(filters = TestClustersThreadFilter.class)
-public class KnnSemanticTextIT extends ESRestTestCase {
+public class KnnSemanticTextIT extends KnnSemanticTextTestCase {
 
     @ClassRule
     public static ElasticsearchCluster cluster = Clusters.testCluster(spec -> spec.plugin("inference-service-test"));
 
-    @Rule(order = Integer.MIN_VALUE)
-    public ProfileLogger profileLogger = new ProfileLogger();
-
-    private int numDocs;
-    private final Map<Integer, String> indexedTexts = new HashMap<>();
-
     @Override
     protected String getTestRestCluster() {
         return cluster.getHttpAddresses();
     }
-
-    @Before
-    public void checkCapability() {
-        assumeTrue("knn with semantic text not available", EsqlCapabilities.Cap.KNN_FUNCTION_V4.isEnabled());
-    }
-
-    @SuppressWarnings("unchecked")
-    public void testKnnQueryWithSemanticText() throws IOException {
-        String knnQuery = """
-            FROM semantic-test METADATA _score
-            | WHERE knn(dense_semantic, [0, 1, 2], 10)
-            | KEEP id, _score, dense_semantic
-            | SORT _score DESC
-            | LIMIT 10
-            """;
-
-        Map<String, Object> response = runEsqlQuery(knnQuery);
-        List<Map<String, Object>> columns = (List<Map<String, Object>>) response.get("columns");
-        assertThat(columns.size(), is(3));
-        List<List<Object>> rows = (List<List<Object>>) response.get("values");
-        assertThat(rows.size(), is(3));
-        for (int row = 0; row < rows.size(); row++) {
-            List<Object> rowData = rows.get(row);
-            Integer id = (Integer) rowData.get(0);
-            assertThat(id, is(3 - row));
-        }
-    }
-
-    public void testKnnQueryOnTextField() throws IOException {
-        String knnQuery = """
-            FROM semantic-test METADATA _score
-            | WHERE knn(text, [0, 1, 2], 10)
-            | KEEP id, _score, dense_semantic
-            | SORT _score DESC
-            | LIMIT 10
-            """;
-
-        ResponseException re = expectThrows(ResponseException.class, () -> runEsqlQuery(knnQuery));
-        assertThat(re.getResponse().getStatusLine().getStatusCode(), is(BAD_REQUEST.getStatus()));
-        assertThat(re.getMessage(), containsString("[knn] queries are only supported on [dense_vector] fields"));
-    }
-
-    public void testKnnQueryOnSparseSemanticTextField() throws IOException {
-        String knnQuery = """
-            FROM semantic-test METADATA _score
-            | WHERE knn(sparse_semantic, [0, 1, 2], 10)
-            | KEEP id, _score, sparse_semantic
-            | SORT _score DESC
-            | LIMIT 10
-            """;
-
-        ResponseException re = expectThrows(ResponseException.class, () -> runEsqlQuery(knnQuery));
-        assertThat(re.getResponse().getStatusLine().getStatusCode(), is(BAD_REQUEST.getStatus()));
-        assertThat(re.getMessage(), containsString("[knn] queries are only supported on [dense_vector] fields"));
-    }
-
-    @Before
-    public void setUp() throws Exception {
-        super.setUp();
-        setupInferenceEndpoints();
-        setupIndex();
-    }
-
-    private void setupIndex() throws IOException {
-        Request request = new Request("PUT", "/semantic-test");
-        request.setJsonEntity("""
-            {
-              "mappings": {
-                "properties": {
-                  "id": {
-                    "type": "integer"
-                  },
-                  "dense_semantic": {
-                    "type": "semantic_text",
-                    "inference_id": "test_dense_inference"
-                  },
-                  "sparse_semantic": {
-                    "type": "semantic_text",
-                    "inference_id": "test_sparse_inference"
-                  },
-                  "text": {
-                    "type": "text",
-                    "copy_to": ["dense_semantic", "sparse_semantic"]
-                  }
-                }
-              },
-              "settings": {
-                "index": {
-                  "number_of_shards": 1,
-                  "number_of_replicas": 0
-                }
-              }
-            }
-            """);
-        assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode());
-
-        request = new Request("POST", "/_bulk?index=semantic-test&refresh=true");
-        request.setJsonEntity("""
-            {"index": {"_id": "1"}}
-            {"id": 1, "text": "sample text"}
-            {"index": {"_id": "2"}}
-            {"id": 2, "text": "another sample text"}
-            {"index": {"_id": "3"}}
-            {"id": 3, "text": "yet another sample text"}
-            """);
-        assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode());
-    }
-
-    private void setupInferenceEndpoints() throws IOException {
-        CsvTestsDataLoader.createTextEmbeddingInferenceEndpoint(client());
-        CsvTestsDataLoader.createSparseEmbeddingInferenceEndpoint(client());
-    }
-
-    @After
-    public void tearDown() throws Exception {
-        super.tearDown();
-        client().performRequest(new Request("DELETE", "semantic-test"));
-
-        if (CsvTestsDataLoader.clusterHasTextEmbeddingInferenceEndpoint(client())) {
-            CsvTestsDataLoader.deleteTextEmbeddingInferenceEndpoint(client());
-        }
-        if (CsvTestsDataLoader.clusterHasSparseEmbeddingInferenceEndpoint(client())) {
-            CsvTestsDataLoader.deleteSparseEmbeddingInferenceEndpoint(client());
-        }
-    }
-
-    private Map<String, Object> runEsqlQuery(String query) throws IOException {
-        RestEsqlTestCase.RequestObjectBuilder builder = requestObjectBuilder().query(query);
-        return runEsqlSync(builder, new AssertWarnings.NoWarnings(), profileLogger);
-    }
 }

KnnSemanticTextTestCase.java (new file, package org.elasticsearch.xpack.esql.qa.rest)

Lines changed: 164 additions & 0 deletions

@@ -0,0 +1,164 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.qa.rest;
+
+import org.elasticsearch.client.Request;
+import org.elasticsearch.client.ResponseException;
+import org.elasticsearch.test.rest.ESRestTestCase;
+import org.elasticsearch.xpack.esql.AssertWarnings;
+import org.elasticsearch.xpack.esql.CsvTestsDataLoader;
+import org.elasticsearch.xpack.esql.action.EsqlCapabilities;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import static org.elasticsearch.rest.RestStatus.BAD_REQUEST;
+import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.requestObjectBuilder;
+import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.runEsqlSync;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.core.StringContains.containsString;
+
+public class KnnSemanticTextTestCase extends ESRestTestCase {
+
+    @Rule(order = Integer.MIN_VALUE)
+    public ProfileLogger profileLogger = new ProfileLogger();
+
+    @Before
+    public void checkCapability() {
+        assumeTrue("knn with semantic text not available", EsqlCapabilities.Cap.KNN_FUNCTION_V4.isEnabled());
+    }
+
+    @SuppressWarnings("unchecked")
+    public void testKnnQueryWithSemanticText() throws IOException {
+        String knnQuery = """
+            FROM semantic-test METADATA _score
+            | WHERE knn(dense_semantic, [0, 1, 2], 10)
+            | KEEP id, _score, dense_semantic
+            | SORT _score DESC
+            | LIMIT 10
+            """;
+
+        Map<String, Object> response = runEsqlQuery(knnQuery);
+        List<Map<String, Object>> columns = (List<Map<String, Object>>) response.get("columns");
+        assertThat(columns.size(), is(3));
+        List<List<Object>> rows = (List<List<Object>>) response.get("values");
+        assertThat(rows.size(), is(3));
+        for (int row = 0; row < rows.size(); row++) {
+            List<Object> rowData = rows.get(row);
+            Integer id = (Integer) rowData.get(0);
+            assertThat(id, is(3 - row));
+        }
+    }
+
+    public void testKnnQueryOnTextField() throws IOException {
+        String knnQuery = """
+            FROM semantic-test METADATA _score
+            | WHERE knn(text, [0, 1, 2], 10)
+            | KEEP id, _score, dense_semantic
+            | SORT _score DESC
+            | LIMIT 10
+            """;
+
+        ResponseException re = expectThrows(ResponseException.class, () -> runEsqlQuery(knnQuery));
+        assertThat(re.getResponse().getStatusLine().getStatusCode(), is(BAD_REQUEST.getStatus()));
+        assertThat(re.getMessage(), containsString("[knn] queries are only supported on [dense_vector] fields"));
+    }
+
+    public void testKnnQueryOnSparseSemanticTextField() throws IOException {
+        String knnQuery = """
+            FROM semantic-test METADATA _score
+            | WHERE knn(sparse_semantic, [0, 1, 2], 10)
+            | KEEP id, _score, sparse_semantic
+            | SORT _score DESC
+            | LIMIT 10
+            """;
+
+        ResponseException re = expectThrows(ResponseException.class, () -> runEsqlQuery(knnQuery));
+        assertThat(re.getResponse().getStatusLine().getStatusCode(), is(BAD_REQUEST.getStatus()));
+        assertThat(re.getMessage(), containsString("[knn] queries are only supported on [dense_vector] fields"));
+    }
+
+    @Before
+    public void setUp() throws Exception {
+        super.setUp();
+        setupInferenceEndpoints();
+        setupIndex();
+    }
+
+    private void setupIndex() throws IOException {
+        Request request = new Request("PUT", "/semantic-test");
+        request.setJsonEntity("""
+            {
+              "mappings": {
+                "properties": {
+                  "id": {
+                    "type": "integer"
+                  },
+                  "dense_semantic": {
+                    "type": "semantic_text",
+                    "inference_id": "test_dense_inference"
+                  },
+                  "sparse_semantic": {
+                    "type": "semantic_text",
+                    "inference_id": "test_sparse_inference"
+                  },
+                  "text": {
+                    "type": "text",
+                    "copy_to": ["dense_semantic", "sparse_semantic"]
+                  }
+                }
+              },
+              "settings": {
+                "index": {
+                  "number_of_shards": 1,
+                  "number_of_replicas": 0
+                }
+              }
+            }
+            """);
+        assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode());
+
+        request = new Request("POST", "/_bulk?index=semantic-test&refresh=true");
+        request.setJsonEntity("""
+            {"index": {"_id": "1"}}
+            {"id": 1, "text": "sample text"}
+            {"index": {"_id": "2"}}
+            {"id": 2, "text": "another sample text"}
+            {"index": {"_id": "3"}}
+            {"id": 3, "text": "yet another sample text"}
+            """);
+        assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode());
+    }
+
+    private void setupInferenceEndpoints() throws IOException {
+        CsvTestsDataLoader.createTextEmbeddingInferenceEndpoint(client());
+        CsvTestsDataLoader.createSparseEmbeddingInferenceEndpoint(client());
+    }
+
+    @After
+    public void tearDown() throws Exception {
+        super.tearDown();
+        client().performRequest(new Request("DELETE", "semantic-test"));
+
+        if (CsvTestsDataLoader.clusterHasTextEmbeddingInferenceEndpoint(client())) {
+            CsvTestsDataLoader.deleteTextEmbeddingInferenceEndpoint(client());
+        }
+        if (CsvTestsDataLoader.clusterHasSparseEmbeddingInferenceEndpoint(client())) {
+            CsvTestsDataLoader.deleteSparseEmbeddingInferenceEndpoint(client());
+        }
+    }
+
+    private Map<String, Object> runEsqlQuery(String query) throws IOException {
+        RestEsqlTestCase.RequestObjectBuilder builder = requestObjectBuilder().query(query);
+        return runEsqlSync(builder, new AssertWarnings.NoWarnings(), profileLogger);
+    }
+}