ESQL: Speed up spec tests #131949
@@ -68,7 +68,6 @@
 import static org.elasticsearch.xpack.esql.CsvTestUtils.ExpectedResults;
 import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled;
 import static org.elasticsearch.xpack.esql.CsvTestUtils.loadCsvSpecValues;
-import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.availableDatasetsForEs;
 import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.createInferenceEndpoints;
 import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.deleteInferenceEndpoints;
 import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.loadDataSetIntoEs;

@@ -79,6 +78,7 @@
 import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.SEMANTIC_TEXT_FIELD_CAPS;
 import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.SOURCE_FIELD_MAPPING;
 import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.assertNotPartial;
+import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.hasCapabilities;
 // This test can run very long in serverless configurations
 @TimeoutSuite(millis = 30 * TimeUnits.MINUTE)

@@ -101,19 +101,7 @@ public abstract class EsqlSpecTestCase extends ESRestTestCase {
     public static List<Object[]> readScriptSpec() throws Exception {
         List<URL> urls = classpathResources("/*.csv-spec");
         assertTrue("Not enough specs found " + urls, urls.size() > 0);
-        List<Object[]> specs = SpecReader.readScriptSpec(urls, specParser());
-
-        int len = specs.get(0).length;
-        List<Object[]> testcases = new ArrayList<>();
-        for (var spec : specs) {
-            for (Mode mode : Mode.values()) {
-                Object[] obj = new Object[len + 1];
-                System.arraycopy(spec, 0, obj, 0, len);
-                obj[len] = mode;
-                testcases.add(obj);
-            }
-        }
-        return testcases;
+        return SpecReader.readScriptSpec(urls, specParser());
     }

     protected EsqlSpecTestCase(
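The readScriptSpec change above stops multiplying every csv-spec test by every request Mode; instead each test instance picks its mode at random in the constructor, shown in the next hunk. A standalone sketch of the two parameterization strategies (the Mode values and helper names below are illustrative, not copied from the PR; the real enum lives elsewhere in the test framework):

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Random;

public class ModeParameterizationSketch {
    // Stand-in for the real request modes.
    enum Mode { SYNC, ASYNC }

    // Old approach: emit one parameterized test case per (spec, mode) pair,
    // so the suite size is |specs| * |Mode.values()|.
    static List<Object[]> expandPerMode(List<Object[]> specs) {
        List<Object[]> testcases = new ArrayList<>();
        for (Object[] spec : specs) {
            for (Mode mode : Mode.values()) {
                Object[] withMode = new Object[spec.length + 1];
                System.arraycopy(spec, 0, withMode, 0, spec.length);
                withMode[spec.length] = mode;
                testcases.add(withMode);
            }
        }
        return testcases;
    }

    // New approach: keep one case per spec and roll the mode per instance.
    static Mode randomMode(Random random) {
        Mode[] modes = Mode.values();
        return modes[random.nextInt(modes.length)];
    }
}
```

Randomizing instead of enumerating cuts the suite size by a factor of |Mode.values()| while still covering every mode with high probability across the many spec cases.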
@@ -122,34 +110,38 @@ protected EsqlSpecTestCase(
         String testName,
         Integer lineNumber,
         CsvTestCase testCase,
-        String instructions,
-        Mode mode
+        String instructions
     ) {
         this.fileName = fileName;
         this.groupName = groupName;
         this.testName = testName;
         this.lineNumber = lineNumber;
         this.testCase = testCase;
         this.instructions = instructions;
-        this.mode = mode;
+        this.mode = randomFrom(Mode.values());
     }

+    private static boolean dataLoaded = false;
+
     @Before
     public void setup() throws IOException {
-        if (supportsInferenceTestService()) {
-            createInferenceEndpoints(adminClient());
-        }
-
         boolean supportsLookup = supportsIndexModeLookup();
         boolean supportsSourceMapping = supportsSourceFieldMapping();
-        if (indexExists(availableDatasetsForEs(client(), supportsLookup, supportsSourceMapping).iterator().next().indexName()) == false) {
-            loadDataSetIntoEs(client(), supportsLookup, supportsSourceMapping);
+        boolean supportsInferenceTestService = supportsInferenceTestService();
+        if (dataLoaded == false) {
+            if (supportsInferenceTestService) {
+                createInferenceEndpoints(adminClient());
+            }
+
+            loadDataSetIntoEs(client(), supportsLookup, supportsSourceMapping, supportsInferenceTestService);
+            dataLoaded = true;
         }
     }

     @AfterClass
     public static void wipeTestData() throws IOException {
         try {
+            dataLoaded = false;
             adminClient().performRequest(new Request("DELETE", "/*"));
         } catch (ResponseException e) {
             // 404 here just means we had no indexes

@@ -159,7 +151,6 @@ public static void wipeTestData() throws IOException {
         }

         deleteInferenceEndpoints(adminClient());
     }

     public boolean logResults() {
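The setup above replaces the per-test indexExists probe (which hit the cluster before every single test) with a local dataLoaded flag: the datasets and, where supported, the inference endpoints are loaded once, and wipeTestData resets the flag after the suite so the next one starts clean. A stripped-down sketch of that guard idiom outside the Elasticsearch test framework (class and method names here are made up for illustration):

```java
import org.junit.AfterClass;
import org.junit.Before;

public abstract class LoadOnceSketch {
    // Shared by every test in the suite, so the expensive load runs once.
    private static boolean dataLoaded = false;

    @Before
    public void setUpData() throws Exception {
        if (dataLoaded == false) {
            loadTestData();    // expensive: e.g. bulk-index every CSV dataset
            dataLoaded = true; // later tests skip the reload entirely
        }
    }

    @AfterClass
    public static void wipeData() throws Exception {
        dataLoaded = false; // force a fresh load for whatever suite runs next
        deleteEverything();
    }

    protected abstract void loadTestData() throws Exception;

    // Placeholder for DELETE /* against the test cluster.
    static void deleteEverything() throws Exception {}
}
```

The trade-off is that later tests in a suite see whatever the earlier ones left behind, which is why the wipe-and-reset still happens at class teardown.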
@@ -211,38 +202,6 @@ protected static void checkCapabilities(RestClient client, TestFeatureService te
         }
     }

-    protected static boolean hasCapabilities(List<String> requiredCapabilities) throws IOException {
-        return hasCapabilities(adminClient(), requiredCapabilities);
-    }
-
-    public static boolean hasCapabilities(RestClient client, List<String> requiredCapabilities) throws IOException {
-        if (requiredCapabilities.isEmpty()) {
-            return true;
-        }
-        try {
-            if (clusterHasCapability(client, "POST", "/_query", List.of(), requiredCapabilities).orElse(false)) {
-                return true;
-            }
-            LOGGER.info("capabilities API returned false, we might be in a mixed version cluster so falling back to cluster features");
-        } catch (ResponseException e) {
-            if (e.getResponse().getStatusLine().getStatusCode() / 100 == 4) {
-                /*
-                 * The node we're testing against is too old for the capabilities
-                 * API which means it has to be pretty old. Very old capabilities
-                 * are ALSO present in the features API, so we can check them instead.
-                 *
-                 * It's kind of weird that we check for *any* 400, but that's required
-                 * because old versions of Elasticsearch return 400, not the expected
-                 * 404.
-                 */
-                LOGGER.info("capabilities API failed, falling back to cluster features");
-            } else {
-                throw e;
-            }
-        }
-        return false;
-    }
Moved. Also, we don't need this big comment. It was required when this test supported versions of Elasticsearch without the capability API.
     protected boolean supportsInferenceTestService() {
         return true;
     }
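Per the comment above, hasCapabilities now lives in RestEsqlTestCase (note the new static import in the first hunk). A minimal sketch of what the capability gate boils down to, keeping only the capabilities-API path and reusing the clusterHasCapability call from the removed block; the class name and the placeholder helper are assumptions, and the real moved method presumably still handles mixed-version clusters:

```java
import java.io.IOException;
import java.util.List;
import java.util.Optional;

import org.elasticsearch.client.RestClient;

public final class CapabilityGateSketch {
    private CapabilityGateSketch() {}

    // Returns true when the cluster reports every required ES|QL capability for POST /_query.
    public static boolean hasCapabilities(RestClient client, List<String> requiredCapabilities) throws IOException {
        if (requiredCapabilities.isEmpty()) {
            return true; // nothing to check, run the spec
        }
        // An empty Optional means the capabilities API gave no answer, so we skip the spec.
        return clusterHasCapability(client, "POST", "/_query", List.of(), requiredCapabilities).orElse(false);
    }

    // Placeholder so the sketch stands alone; in the real tests this is the
    // ESRestTestCase helper used in the removed code above.
    private static Optional<Boolean> clusterHasCapability(
        RestClient client,
        String method,
        String path,
        List<String> parameters,
        List<String> capabilities
    ) throws IOException {
        return Optional.empty();
    }
}
```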
@@ -271,7 +230,9 @@ protected final void doTest(String query) throws Throwable {
             builder.tables(tables());
         }

-        Map<?, ?> prevTooks = supportsTook() ? tooks() : null;
+        boolean checkTook = supportsTook() && rarely();
About 20% of the time we check
It's not our numbers that are expensive - it's the fact that we have to fetch the entire x-pack usage api.
+        Map<?, ?> prevTooks = checkTook ? tooks() : null;
         Map<String, Object> answer = RestEsqlTestCase.runEsql(
             builder.query(query),
             testCase.assertWarnings(deduplicateExactWarnings()),
@@ -296,7 +257,7 @@ protected final void doTest(String query) throws Throwable {
         assertResults(expectedColumnsWithValues, actualColumns, actualValues, testCase.ignoreOrder, logger);

-        if (supportsTook()) {
+        if (checkTook) {
             LOGGER.info("checking took incremented from {}", prevTooks);
             long took = ((Number) answer.get("took")).longValue();
             int prevTookHisto = ((Number) prevTooks.remove(tookKey(took))).intValue();
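As the review thread above notes, the expensive part of this assertion is not the arithmetic but the tooks() call, which fetches the whole x-pack usage response just to read the took-time histogram. Gating it behind rarely() means most runs skip that round trip entirely. A standalone sketch of the same gating pattern, with a plain java.util.Random standing in for the ESTestCase rarely() helper and the 20% figure taken from the comment above (all names below are illustrative):

```java
import java.util.HashMap;
import java.util.Map;
import java.util.Random;

public class TookCheckSketch {
    private static final Random RANDOM = new Random();

    // Stand-in for ESTestCase.rarely(): true on a small fraction of runs (~20% here).
    static boolean rarely() {
        return RANDOM.nextInt(100) < 20;
    }

    // Pretend cluster-side histogram of query took times, bucketed by duration.
    private final Map<String, Integer> clusterTookHistogram = new HashMap<>();

    void runOneSpec(boolean supportsTook) {
        boolean checkTook = supportsTook && rarely();
        // Only pay for the "usage API" fetch when we are actually going to assert on it.
        Map<String, Integer> prevTooks = checkTook ? fetchTookHistogram() : null;

        long took = runQuery();

        if (checkTook) {
            int before = prevTooks.getOrDefault(bucketFor(took), 0);
            int after = fetchTookHistogram().getOrDefault(bucketFor(took), 0);
            if (after <= before) {
                throw new AssertionError("took histogram bucket did not grow");
            }
        }
    }

    // Placeholder for the real ES|QL request: records its own took time in the histogram.
    long runQuery() {
        long took = 5;
        clusterTookHistogram.merge(bucketFor(took), 1, Integer::sum);
        return took;
    }

    // Placeholder for the x-pack usage call; returns a copy so "before" is a snapshot.
    Map<String, Integer> fetchTookHistogram() {
        return new HashMap<>(clusterTookHistogram);
    }

    String bucketFor(long tookMillis) {
        return tookMillis < 10 ? "lt_10ms" : "gte_10ms";
    }
}
```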
@@ -413,15 +374,14 @@ protected boolean preserveClusterUponCompletion() {
         return true;
     }

-    @Before
Saves some time at the cost of some paranoia.
     @After
     public void assertRequestBreakerEmptyAfterTests() throws Exception {
         assertRequestBreakerEmpty();
     }

     public static void assertRequestBreakerEmpty() throws Exception {
         assertBusy(() -> {
-            HttpEntity entity = adminClient().performRequest(new Request("GET", "/_nodes/stats")).getEntity();
+            HttpEntity entity = adminClient().performRequest(new Request("GET", "/_nodes/stats?metric=breaker")).getEntity();
Saves a bunch of bytes across the wire. We only really want the breaker stats.
             Map<?, ?> stats = XContentHelper.convertToMap(XContentType.JSON.xContent(), entity.getContent(), false);
             Map<?, ?> nodes = (Map<?, ?>) stats.get("nodes");
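The comment above is about narrowing the node stats response to just the circuit-breaker section before parsing it. A rough sketch of what the parsing of that trimmed payload can look like, using the nodes-stats field names "breakers" -> "request" -> "estimated_size_in_bytes"; the class and method names are illustrative, not from the PR:

```java
import java.util.Map;

public final class BreakerStatsSketch {
    private BreakerStatsSketch() {}

    // Reads the request circuit breaker's estimated size for one node out of a parsed
    // GET /_nodes/stats?metric=breaker response.
    @SuppressWarnings("unchecked")
    static long requestBreakerBytes(Map<String, Object> nodeStats) {
        Map<String, Object> breakers = (Map<String, Object>) nodeStats.get("breakers");
        Map<String, Object> request = (Map<String, Object>) breakers.get("request");
        return ((Number) request.get("estimated_size_in_bytes")).longValue();
    }

    // Fails if any node still has bytes accounted against its request breaker.
    @SuppressWarnings("unchecked")
    static void assertAllRequestBreakersEmpty(Map<String, Object> parsedResponse) {
        Map<String, Object> nodes = (Map<String, Object>) parsedResponse.get("nodes");
        for (Map.Entry<String, Object> node : nodes.entrySet()) {
            long bytes = requestBreakerBytes((Map<String, Object>) node.getValue());
            if (bytes != 0) {
                throw new AssertionError("request breaker not empty on node " + node.getKey() + ": " + bytes + " bytes");
            }
        }
    }
}
```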
Unused.