diff --git a/.github/workflows/cron-job-its.yml b/.github/workflows/cron-job-its.yml index d13ae4c6ad62..1f555778ab9f 100644 --- a/.github/workflows/cron-job-its.yml +++ b/.github/workflows/cron-job-its.yml @@ -88,7 +88,7 @@ jobs: strategy: fail-fast: false matrix: - testing_group: [ query, query-retry, query-error, security ] + testing_group: [ query, security ] uses: ./.github/workflows/reusable-standard-its.yml needs: build with: diff --git a/.github/workflows/standard-its.yml b/.github/workflows/standard-its.yml index 1af7acd9c8e4..3c30c5c91945 100644 --- a/.github/workflows/standard-its.yml +++ b/.github/workflows/standard-its.yml @@ -78,7 +78,7 @@ jobs: strategy: fail-fast: false matrix: - testing_group: [query, query-retry, query-error, security, centralized-datasource-schema] + testing_group: [query, security, centralized-datasource-schema] uses: ./.github/workflows/reusable-standard-its.yml if: ${{ needs.changes.outputs.core == 'true' || needs.changes.outputs.common-extensions == 'true' }} with: diff --git a/integration-tests/src/test/java/org/apache/druid/tests/query/ITJdbcQueryTest.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/JdbcQueryTest.java similarity index 57% rename from integration-tests/src/test/java/org/apache/druid/tests/query/ITJdbcQueryTest.java rename to embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/JdbcQueryTest.java index e63fd1341963..320208537538 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/query/ITJdbcQueryTest.java +++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/JdbcQueryTest.java @@ -17,22 +17,15 @@ * under the License. */ -package org.apache.druid.tests.query; +package org.apache.druid.testing.embedded.query; import com.google.common.collect.ImmutableList; -import com.google.inject.Inject; import org.apache.calcite.avatica.AvaticaSqlException; -import org.apache.druid.https.SSLClientConfig; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.testing.guice.DruidTestModuleFactory; -import org.apache.druid.testing.tools.IntegrationTestingConfig; -import org.apache.druid.testing.utils.DataLoaderHelper; -import org.apache.druid.tests.TestNGGroup; -import org.testng.Assert; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Guice; -import org.testng.annotations.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + import java.sql.Connection; import java.sql.DatabaseMetaData; @@ -47,69 +40,40 @@ import java.util.Properties; import java.util.Set; -@Test(groups = {TestNGGroup.QUERY, TestNGGroup.CENTRALIZED_DATASOURCE_SCHEMA}) -@Guice(moduleFactory = DruidTestModuleFactory.class) -public class ITJdbcQueryTest +/** + * JDBC query integration tests. 
+ * Note: queries over TLS connections still need to be added to complete the conversion.
+ */
+public class JdbcQueryTest extends QueryTestBase
 {
-  private static final Logger LOG = new Logger(ITJdbcQueryTest.class);
-  private static final String WIKIPEDIA_DATA_SOURCE = "wikipedia_editstream";
+  private static final Logger LOG = new Logger(JdbcQueryTest.class);

   private static final String CONNECTION_TEMPLATE = "jdbc:avatica:remote:url=%s/druid/v2/sql/avatica/";
   private static final String TLS_CONNECTION_TEMPLATE =
       "jdbc:avatica:remote:url=%s/druid/v2/sql/avatica/;truststore=%s;truststore_password=%s;keystore=%s;keystore_password=%s;key_password=%s";
   private static final String QUERY_TEMPLATE =
-      "SELECT \"user\", SUM(\"added\"), COUNT(*)" +
-      "FROM \"wikipedia\" " +
-      "WHERE \"__time\" >= CURRENT_TIMESTAMP - INTERVAL '99' YEAR AND \"language\" = %s" +
-      "GROUP BY 1 ORDER BY 3 DESC LIMIT 10";
-  private static final String QUERY = StringUtils.format(QUERY_TEMPLATE, "'en'");
-
-  private static final String QUERY_PARAMETERIZED = StringUtils.format(QUERY_TEMPLATE, "?");
+      "SELECT \"item\", SUM(\"value\"), COUNT(*) " +
+      "FROM \"%s\" " +
+      "WHERE \"__time\" >= CURRENT_TIMESTAMP - INTERVAL '99' YEAR AND \"value\" < %s \n" +
+      "GROUP BY 1 ORDER BY 3 DESC LIMIT 10";

   private String[] connections;
   private Properties connectionProperties;

-  @Inject
-  private IntegrationTestingConfig config;
-
-  @Inject
-  SSLClientConfig sslConfig;
+  private String dataSourceName;

-  @Inject
-  private DataLoaderHelper dataLoaderHelper;
-
-  @BeforeMethod
-  public void before()
+  @Override
+  protected void beforeAll()
   {
     connectionProperties = new Properties();
     connectionProperties.setProperty("user", "admin");
     connectionProperties.setProperty("password", "priest");
     connections = new String[]{
-        StringUtils.format(CONNECTION_TEMPLATE, config.getRouterUrl()),
-        StringUtils.format(CONNECTION_TEMPLATE, config.getBrokerUrl()),
-        StringUtils.format(
-            TLS_CONNECTION_TEMPLATE,
-            config.getRouterTLSUrl(),
-            sslConfig.getTrustStorePath(),
-            sslConfig.getTrustStorePasswordProvider().getPassword(),
-            sslConfig.getKeyStorePath(),
-            sslConfig.getKeyStorePasswordProvider().getPassword(),
-            sslConfig.getKeyManagerPasswordProvider().getPassword()
-        ),
-        StringUtils.format(
-            TLS_CONNECTION_TEMPLATE,
-            config.getBrokerTLSUrl(),
-            sslConfig.getTrustStorePath(),
-            sslConfig.getTrustStorePasswordProvider().getPassword(),
-            sslConfig.getKeyStorePath(),
-            sslConfig.getKeyStorePasswordProvider().getPassword(),
-            sslConfig.getKeyManagerPasswordProvider().getPassword()
-        )
-    };
-    // ensure that wikipedia segments are loaded completely
-    dataLoaderHelper.waitUntilDatasourceIsReady(WIKIPEDIA_DATA_SOURCE);
-    dataLoaderHelper.waitUntilDatasourceIsReady("wikipedia");
-    dataLoaderHelper.waitUntilDatasourceIsReady("twitterstream");
+        StringUtils.format(CONNECTION_TEMPLATE, getServerUrl(router)),
+        StringUtils.format(CONNECTION_TEMPLATE, getServerUrl(broker)),
+    };
+
+    dataSourceName = ingestBasicData();
   }

   @Test
@@ -126,7 +90,7 @@ public void testJdbcMetadata()
         catalogs.add(catalog);
       }
       LOG.info("catalogs %s", catalogs);
-      Assert.assertEquals(catalogs, ImmutableList.of("druid"));
+      Assertions.assertEquals(ImmutableList.of("druid"), catalogs);

       Set<String> schemas = new HashSet<>();
       ResultSet schemasMetadata = metadata.getSchemas("druid", null);
@@ -136,7 +100,7 @@ public void testJdbcMetadata()
       }
       LOG.info("'druid' catalog schemas %s", schemas);
       // maybe more schemas than this, but at least should have these
-      Assert.assertTrue(schemas.containsAll(ImmutableList.of("INFORMATION_SCHEMA", "druid", "lookup", "sys")));
+      Assertions.assertTrue(schemas.containsAll(ImmutableList.of("INFORMATION_SCHEMA", "druid", "lookup", "sys")));

       Set<String> druidTables = new HashSet<>();
       ResultSet tablesMetadata = metadata.getTables("druid", "druid", null, null);
@@ -145,25 +109,25 @@ public void testJdbcMetadata()
         druidTables.add(table);
       }
       LOG.info("'druid' schema tables %s", druidTables);
-      // maybe more tables than this, but at least should have these
-      Assert.assertTrue(
-          druidTables.containsAll(ImmutableList.of("twitterstream", "wikipedia", WIKIPEDIA_DATA_SOURCE))
+      // There may be more tables than this, but it should at least contain the ingested datasource
+      Assertions.assertTrue(
+          druidTables.containsAll(ImmutableList.of(dataSourceName))
       );

       Set<String> wikiColumns = new HashSet<>();
-      ResultSet columnsMetadata = metadata.getColumns("druid", "druid", WIKIPEDIA_DATA_SOURCE, null);
+      ResultSet columnsMetadata = metadata.getColumns("druid", "druid", dataSourceName, null);
       while (columnsMetadata.next()) {
         final String column = columnsMetadata.getString(4);
         wikiColumns.add(column);
       }
-      LOG.info("'%s' columns %s", WIKIPEDIA_DATA_SOURCE, wikiColumns);
+      LOG.info("'%s' columns %s", dataSourceName, wikiColumns);
       // a lot more columns than this, but at least should have these
-      Assert.assertTrue(
-          wikiColumns.containsAll(ImmutableList.of("added", "city", "delta", "language"))
+      Assertions.assertTrue(
+          wikiColumns.containsAll(ImmutableList.of("__time", "item", "value"))
       );
     }
     catch (SQLException throwables) {
-      Assert.fail(throwables.getMessage());
+      Assertions.fail(throwables.getMessage());
     }
   }
 }

@@ -171,21 +135,22 @@ public void testJdbcMetadata()
   @Test
   public void testJdbcStatementQuery()
   {
+    String query = StringUtils.format(QUERY_TEMPLATE, dataSourceName, "1000");
     for (String url : connections) {
       try (Connection connection = DriverManager.getConnection(url, connectionProperties)) {
         try (Statement statement = connection.createStatement()) {
-          final ResultSet resultSet = statement.executeQuery(QUERY);
+          final ResultSet resultSet = statement.executeQuery(query);
           int resultRowCount = 0;
           while (resultSet.next()) {
             resultRowCount++;
             LOG.info("%s,%s,%s", resultSet.getString(1), resultSet.getLong(2), resultSet.getLong(3));
           }
-          Assert.assertEquals(resultRowCount, 10);
+          Assertions.assertEquals(7, resultRowCount);
           resultSet.close();
         }
       }
       catch (SQLException throwables) {
-        Assert.fail(throwables.getMessage());
+        Assertions.fail(throwables.getMessage());
       }
     }
   }

@@ -193,35 +158,41 @@ public void testJdbcStatementQuery()
   @Test
   public void testJdbcPrepareStatementQuery()
   {
+    String query = StringUtils.format(QUERY_TEMPLATE, dataSourceName, "?");
     for (String url : connections) {
       try (Connection connection = DriverManager.getConnection(url, connectionProperties)) {
-        try (PreparedStatement statement = connection.prepareStatement(QUERY_PARAMETERIZED)) {
-          statement.setString(1, "en");
+        try (PreparedStatement statement = connection.prepareStatement(query)) {
+          statement.setLong(1, 1000);
           final ResultSet resultSet = statement.executeQuery();
           int resultRowCount = 0;
           while (resultSet.next()) {
             resultRowCount++;
             LOG.info("%s,%s,%s", resultSet.getString(1), resultSet.getLong(2), resultSet.getLong(3));
           }
-          Assert.assertEquals(resultRowCount, 10);
+          Assertions.assertEquals(7, resultRowCount);
           resultSet.close();
         }
       }
       catch (SQLException throwables) {
-        Assert.fail(throwables.getMessage());
+        Assertions.fail(throwables.getMessage());
       }
     }
   }

-  @Test(expectedExceptions =
AvaticaSqlException.class, expectedExceptionsMessageRegExp = ".* No value bound for parameter \\(position \\[1]\\)") - public void testJdbcPrepareStatementQueryMissingParameters() throws SQLException + @Test + public void testJdbcPrepareStatementQueryMissingParameters() { + String query = StringUtils.format(QUERY_TEMPLATE, dataSourceName, "?"); for (String url : connections) { try (Connection connection = DriverManager.getConnection(url, connectionProperties); - PreparedStatement statement = connection.prepareStatement(QUERY_PARAMETERIZED); + PreparedStatement statement = connection.prepareStatement(query); ResultSet resultSet = statement.executeQuery()) { // This won't actually run as we expect the exception to be thrown before it gets here - throw new IllegalStateException(resultSet.toString()); + Assertions.fail(resultSet.toString()); + } + catch (SQLException e) { + Assertions.assertInstanceOf(AvaticaSqlException.class, e); + Assertions.assertTrue(e.getMessage().contains("No value bound for parameter (position [1])")); } } } diff --git a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/QueryErrorTest.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/QueryErrorTest.java index e3896700823b..5b7846b05709 100644 --- a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/QueryErrorTest.java +++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/QueryErrorTest.java @@ -29,7 +29,6 @@ import org.apache.druid.query.http.SqlTaskStatus; import org.apache.druid.rpc.HttpResponseException; import org.apache.druid.testing.embedded.EmbeddedClusterApis; -import org.apache.druid.testing.embedded.EmbeddedDruidCluster; import org.apache.druid.testing.embedded.msq.EmbeddedMSQApis; import org.hamcrest.MatcherAssert; import org.junit.jupiter.api.Assertions; @@ -56,42 +55,22 @@ public class QueryErrorTest extends QueryTestBase { // Introduce onAnyRouter(...) 
and use it; add TLS tests in the follow-up patches - protected String tableName; - - @Override - protected EmbeddedDruidCluster createCluster() - { - overlord.addProperty("druid.manager.segments.pollDuration", "PT0.1s"); - indexer.setServerMemory(600_000_000) - .addProperty("druid.worker.capacity", "4") - .addProperty("druid.processing.numThreads", "2") - .addProperty("druid.segment.handoff.pollDuration", "PT0.1s"); - - return EmbeddedDruidCluster.withEmbeddedDerbyAndZookeeper() - .useLatchableEmitter() - .addServer(overlord) - .addServer(coordinator) - .addServer(broker) - .addServer(router) - .addServer(indexer) - .addServer(historical) - .addExtension(ServerManagerForQueryErrorTestModule.class); - } + protected String testDataSourceName; @Override protected void beforeAll() { - tableName = EmbeddedClusterApis.createTestDatasourceName(); + testDataSourceName = EmbeddedClusterApis.createTestDatasourceName(); EmbeddedMSQApis msqApi = new EmbeddedMSQApis(cluster, overlord); SqlTaskStatus ingestionStatus = msqApi.submitTaskSql(StringUtils.format( "REPLACE INTO %s\n" + "OVERWRITE ALL\n" + "SELECT CURRENT_TIMESTAMP AS __time, 1 AS d PARTITIONED BY ALL", - tableName + testDataSourceName )); cluster.callApi().waitForTaskToSucceed(ingestionStatus.getTaskId(), overlord); - cluster.callApi().waitForAllSegmentsToBeAvailable(tableName, coordinator, broker); + cluster.callApi().waitForAllSegmentsToBeAvailable(testDataSourceName, coordinator, broker); } @Test @@ -267,7 +246,7 @@ private static Map buildTestContext(String key) private ListenableFuture sqlQueryFuture(BrokerClient b, String contextKey) { return b.submitSqlQuery(new ClientSqlQuery( - StringUtils.format("SELECT * FROM %s LIMIT 1", tableName), + StringUtils.format("SELECT * FROM %s LIMIT 1", testDataSourceName), null, false, false, @@ -283,7 +262,7 @@ private ListenableFuture sqlQueryFuture(BrokerClient b, String contextKe private ListenableFuture nativeQueryFuture(BrokerClient b, String contextKey) { return b.submitNativeQuery(new Druids.ScanQueryBuilder() - .dataSource(tableName) + .dataSource(testDataSourceName) .eternityInterval() .limit(1) .context(buildTestContext(contextKey)) diff --git a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/QueryRetryOnMissingSegmentsTest.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/QueryRetryOnMissingSegmentsTest.java index 2e1bdb694654..7b36b52bc4a8 100644 --- a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/QueryRetryOnMissingSegmentsTest.java +++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/QueryRetryOnMissingSegmentsTest.java @@ -20,17 +20,11 @@ package org.apache.druid.testing.embedded.query; import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.druid.common.utils.IdUtils; -import org.apache.druid.indexing.common.task.IndexTask; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.jackson.JacksonUtils; import org.apache.druid.query.QueryContexts; import org.apache.druid.query.http.ClientSqlQuery; -import org.apache.druid.testing.embedded.EmbeddedClusterApis; -import org.apache.druid.testing.embedded.EmbeddedDruidCluster; -import org.apache.druid.testing.embedded.indexing.MoreResources; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; @@ -69,41 +63,13 @@ private enum Expectation QUERY_FAILURE } - private ObjectMapper 
jsonMapper; private String tableName; - @Override - protected EmbeddedDruidCluster createCluster() - { - overlord.addProperty("druid.manager.segments.pollDuration", "PT0.1s"); - coordinator.addProperty("druid.manager.segments.useIncrementalCache", "always"); - indexer.setServerMemory(400_000_000) - .addProperty("druid.worker.capacity", "4") - .addProperty("druid.processing.numThreads", "2") - .addProperty("druid.segment.handoff.pollDuration", "PT0.1s"); - - return EmbeddedDruidCluster.withEmbeddedDerbyAndZookeeper() - .useLatchableEmitter() - .addServer(overlord) - .addServer(coordinator) - .addServer(broker) - .addServer(router) - .addServer(indexer) - .addServer(historical) - .addExtension(ServerManagerForQueryErrorTestModule.class); - } - @Override public void beforeAll() { jsonMapper = overlord.bindings().jsonMapper(); - tableName = EmbeddedClusterApis.createTestDatasourceName(); - - final String taskId = IdUtils.getRandomId(); - final IndexTask task = MoreResources.Task.BASIC_INDEX.get().dataSource(tableName).withId(taskId); - cluster.callApi().onLeaderOverlord(o -> o.runTask(taskId, task)); - cluster.callApi().waitForTaskToSucceed(taskId, overlord); - cluster.callApi().waitForAllSegmentsToBeAvailable(tableName, coordinator, broker); + tableName = ingestBasicData(); } @Test diff --git a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/QueryTestBase.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/QueryTestBase.java index b8e810b7ca60..e025c90d2f14 100644 --- a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/QueryTestBase.java +++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/QueryTestBase.java @@ -19,23 +19,33 @@ package org.apache.druid.testing.embedded.query; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.util.concurrent.ListenableFuture; +import org.apache.druid.common.utils.IdUtils; +import org.apache.druid.guice.SleepModule; +import org.apache.druid.indexing.common.task.IndexTask; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.http.client.HttpClient; import org.apache.druid.java.util.http.client.Request; import org.apache.druid.java.util.http.client.response.StatusResponseHandler; import org.apache.druid.java.util.http.client.response.StatusResponseHolder; +import org.apache.druid.query.http.ClientSqlQuery; import org.apache.druid.testing.embedded.EmbeddedBroker; +import org.apache.druid.testing.embedded.EmbeddedClusterApis; import org.apache.druid.testing.embedded.EmbeddedCoordinator; import org.apache.druid.testing.embedded.EmbeddedDruidCluster; import org.apache.druid.testing.embedded.EmbeddedHistorical; import org.apache.druid.testing.embedded.EmbeddedIndexer; import org.apache.druid.testing.embedded.EmbeddedOverlord; import org.apache.druid.testing.embedded.EmbeddedRouter; +import org.apache.druid.testing.embedded.indexing.MoreResources; import org.apache.druid.testing.embedded.junit5.EmbeddedClusterTestBase; import org.jboss.netty.handler.codec.http.HttpMethod; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; +import javax.ws.rs.core.MediaType; import java.net.MalformedURLException; import java.net.URL; import java.nio.charset.StandardCharsets; @@ -46,7 +56,7 @@ public abstract class QueryTestBase extends EmbeddedClusterTestBase { - protected static final String SQL_QUERY_ROUTE = "%s/druid/v2/sql/"; + protected static 
final String SQL_QUERY_ROUTE = "%s/druid/v2/sql";

   public static List<Boolean> SHOULD_USE_BROKER_TO_QUERY = List.of(true, false);

   protected final EmbeddedBroker broker = new EmbeddedBroker();
@@ -57,6 +67,7 @@ public abstract class QueryTestBase extends EmbeddedClusterTestBase
   protected final EmbeddedHistorical historical = new EmbeddedHistorical();

   protected HttpClient httpClientRef;
+  protected ObjectMapper jsonMapper;
   protected String brokerEndpoint;
   protected String routerEndpoint;

@@ -85,7 +96,9 @@ protected EmbeddedDruidCluster createCluster()
                .addServer(broker)
                .addServer(router)
                .addServer(indexer)
-               .addServer(historical);
+               .addServer(historical)
+               .addExtension(ServerManagerForQueryErrorTestModule.class)
+               .addExtension(SleepModule.class);
   }

   @BeforeAll
@@ -102,10 +115,55 @@ void setUp()
     }
   }

+  /**
+   * Ingests test data using the task template {@link MoreResources.Task#BASIC_INDEX}, waiting
+   * synchronously for the task to succeed and for all segments to become available.
+   *
+   * @return name of the ingested datasource
+   */
+  protected String ingestBasicData()
+  {
+    String datasourceName = EmbeddedClusterApis.createTestDatasourceName();
+
+    final String taskId = IdUtils.getRandomId();
+    final IndexTask task = MoreResources.Task.BASIC_INDEX.get().dataSource(datasourceName).withId(taskId);
+    cluster.callApi().onLeaderOverlord(o -> o.runTask(taskId, task));
+    cluster.callApi().waitForTaskToSucceed(taskId, overlord);
+    cluster.callApi().waitForAllSegmentsToBeAvailable(datasourceName, coordinator, broker);
+    return datasourceName;
+  }
+
+  /**
+   * Executes a SQL query asynchronously against the given endpoint via the HTTP client.
+   */
+  protected ListenableFuture<StatusResponseHolder> executeQueryAsync(String endpoint, ClientSqlQuery query)
+  {
+    URL url;
+    try {
+      url = new URL(endpoint);
+    }
+    catch (MalformedURLException e) {
+      throw new AssertionError("Malformed URL", e);
+    }
+
+    Assertions.assertNotNull(jsonMapper);
+    String serializedQuery;
+    try {
+      serializedQuery = jsonMapper.writeValueAsString(query);
+    }
+    catch (JsonProcessingException e) {
+      throw new AssertionError(e);
+    }
+
+    Request request = new Request(HttpMethod.POST, url);
+    request.addHeader("Content-Type", MediaType.APPLICATION_JSON);
+    request.setContent(serializedQuery.getBytes(StandardCharsets.UTF_8));
+    return httpClientRef.go(request, StatusResponseHandler.getInstance());
+  }
+
   /**
    * Execute a SQL query against the given endpoint via the HTTP client.
    */
-  protected void executeQuery(
+  protected void executeQueryWithContentType(
       String endpoint,
       String contentType,
       String query,
@@ -150,4 +208,33 @@ protected void executeQuery(
         response.getContent().trim()
     );
   }
+
+  /**
+   * Cancels the query with the given ID at the given endpoint via the HTTP client.
+   *
+   * @return response holder of the cancel request
+   */
+  protected StatusResponseHolder cancelQuery(String endpoint, String queryId)
+  {
+    URL url;
+    try {
+      url = new URL(StringUtils.format("%s/%s", endpoint, queryId));
+    }
+    catch (MalformedURLException e) {
+      throw new AssertionError("Malformed URL", e);
+    }
+
+    Request request = new Request(HttpMethod.DELETE, url);
+    StatusResponseHolder response;
+    try {
+      response = httpClientRef.go(request, StatusResponseHandler.getInstance())
+                              .get();
+    }
+    catch (InterruptedException | ExecutionException e) {
+      throw new AssertionError("Failed to execute the cancel request", e);
+    }
+
+    Assertions.assertNotNull(response);
+    return response;
+  }
 }
diff --git a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/SqlQueryCancelTest.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/SqlQueryCancelTest.java
new file mode 100644
index 000000000000..e78451d4e85f
--- /dev/null
+++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/SqlQueryCancelTest.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testing.embedded.query;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.util.concurrent.ListenableFuture;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.java.util.http.client.response.StatusResponseHolder;
+import org.apache.druid.query.BaseQuery;
+import org.apache.druid.query.http.ClientSqlQuery;
+import org.jboss.netty.handler.codec.http.HttpResponseStatus;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+import java.util.List;
+
+public class SqlQueryCancelTest extends QueryTestBase
+{
+  private static final String QUERY = "SELECT sleep(2) FROM %s LIMIT 2";
+
+  private String dataSourceName;
+
+  @Override
+  public void beforeAll()
+  {
+    jsonMapper = overlord.bindings().jsonMapper();
+    dataSourceName = ingestBasicData();
+  }
+
+  @Test
+  public void testCancelValidQuery() throws Exception
+  {
+    final String sqlQuery = StringUtils.format(QUERY, dataSourceName);
+    final String queryId = "sql-cancel-test";
+    final ClientSqlQuery query = new ClientSqlQuery(
+        sqlQuery,
+        null,
+        false,
+        false,
+        false,
+        ImmutableMap.of(BaseQuery.SQL_QUERY_ID, queryId),
+        List.of()
+    );
+
+    ListenableFuture<StatusResponseHolder> f = executeQueryAsync(routerEndpoint, query);
+
+    // Wait until the sqlLifecycle is authorized and registered
+    Thread.sleep(300L);
+    StatusResponseHolder queryCancellationResponse = cancelQuery(routerEndpoint, queryId);
+
+    StatusResponseHolder queryResponse = f.get();
+
+    Assertions.assertEquals(HttpResponseStatus.ACCEPTED.getCode(), queryCancellationResponse.getStatus().getCode());
+    Assertions.assertEquals(HttpResponseStatus.INTERNAL_SERVER_ERROR.getCode(), queryResponse.getStatus().getCode());
+  }
+
+  @Test
+  public void test_cancelInvalidQuery_returnsNotFound() throws Exception
+  {
+    final String sqlQuery = StringUtils.format(QUERY, dataSourceName);
+    final String validQueryId = "sql-cancel-test-";
+    final String invalidQueryId = "sql-continue-test";
+    final ClientSqlQuery query = new ClientSqlQuery(
+        sqlQuery,
+        null,
+        false,
+        false,
+        false,
+        ImmutableMap.of(BaseQuery.SQL_QUERY_ID, validQueryId),
+        List.of()
+    );
+
+    ListenableFuture<StatusResponseHolder> f = executeQueryAsync(routerEndpoint, query);
+    StatusResponseHolder queryCancellationResponse = cancelQuery(routerEndpoint, invalidQueryId);
+
+    StatusResponseHolder queryResponse = f.get();
+
+    Assertions.assertEquals(HttpResponseStatus.NOT_FOUND.getCode(), queryCancellationResponse.getStatus().getCode());
+    Assertions.assertEquals(HttpResponseStatus.OK.getCode(), queryResponse.getStatus().getCode());
+  }
+}
diff --git a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/SqlQueryHttpRequestHeadersTest.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/SqlQueryHttpRequestHeadersTest.java
index 3ce50da68da3..0dff59c2265b 100644
--- a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/SqlQueryHttpRequestHeadersTest.java
+++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/SqlQueryHttpRequestHeadersTest.java
@@ -38,7 +38,7 @@ public class SqlQueryHttpRequestHeadersTest extends QueryTestBase
   @FieldSource("SHOULD_USE_BROKER_TO_QUERY")
   public void testNullContentType(boolean shouldQueryBroker)
   {
-    executeQuery(
+    executeQueryWithContentType(
         shouldQueryBroker ?
brokerEndpoint : routerEndpoint, null, "select 1", @@ -52,7 +52,7 @@ public void testNullContentType(boolean shouldQueryBroker) @FieldSource("SHOULD_USE_BROKER_TO_QUERY") public void testUnsupportedContentType(boolean shouldQueryBroker) { - executeQuery( + executeQueryWithContentType( shouldQueryBroker ? brokerEndpoint : routerEndpoint, "application/xml", "select 1", @@ -69,7 +69,7 @@ public void testUnsupportedContentType(boolean shouldQueryBroker) @FieldSource("SHOULD_USE_BROKER_TO_QUERY") public void testTextPlain(boolean shouldQueryBroker) { - executeQuery( + executeQueryWithContentType( shouldQueryBroker ? brokerEndpoint : routerEndpoint, MediaType.TEXT_PLAIN, "select \n1", @@ -86,7 +86,7 @@ public void testTextPlain(boolean shouldQueryBroker) @FieldSource("SHOULD_USE_BROKER_TO_QUERY") public void testFormURLEncoded(boolean shouldQueryBroker) { - executeQuery( + executeQueryWithContentType( shouldQueryBroker ? brokerEndpoint : routerEndpoint, MediaType.APPLICATION_FORM_URLENCODED, URLEncoder.encode("select 'x % y'", StandardCharsets.UTF_8), @@ -103,7 +103,7 @@ public void testFormURLEncoded(boolean shouldQueryBroker) @FieldSource("SHOULD_USE_BROKER_TO_QUERY") public void testFormURLEncoded_InvalidEncoding(boolean shouldQueryBroker) { - executeQuery( + executeQueryWithContentType( shouldQueryBroker ? brokerEndpoint : routerEndpoint, MediaType.APPLICATION_FORM_URLENCODED, "select 'x % y'", @@ -120,7 +120,7 @@ public void testFormURLEncoded_InvalidEncoding(boolean shouldQueryBroker) @FieldSource("SHOULD_USE_BROKER_TO_QUERY") public void testJSON(boolean shouldQueryBroker) { - executeQuery( + executeQueryWithContentType( shouldQueryBroker ? brokerEndpoint : routerEndpoint, MediaType.APPLICATION_JSON, "{\"query\":\"select 567\"}", @@ -132,7 +132,7 @@ public void testJSON(boolean shouldQueryBroker) } ); - executeQuery( + executeQueryWithContentType( shouldQueryBroker ? brokerEndpoint : routerEndpoint, "application/json; charset=UTF-8", "{\"query\":\"select 567\"}", @@ -149,7 +149,7 @@ public void testJSON(boolean shouldQueryBroker) @FieldSource("SHOULD_USE_BROKER_TO_QUERY") public void testInvalidJSONFormat(boolean shouldQueryBroker) { - executeQuery( + executeQueryWithContentType( shouldQueryBroker ? brokerEndpoint : routerEndpoint, MediaType.APPLICATION_JSON, "{\"query\":select 567}", @@ -166,7 +166,7 @@ public void testInvalidJSONFormat(boolean shouldQueryBroker) @FieldSource("SHOULD_USE_BROKER_TO_QUERY") public void testEmptyQuery_TextPlain(boolean shouldQueryBroker) { - executeQuery( + executeQueryWithContentType( shouldQueryBroker ? brokerEndpoint : routerEndpoint, MediaType.TEXT_PLAIN, null, @@ -183,7 +183,7 @@ public void testEmptyQuery_TextPlain(boolean shouldQueryBroker) @FieldSource("SHOULD_USE_BROKER_TO_QUERY") public void testEmptyQuery_UrlEncoded(boolean shouldQueryBroker) { - executeQuery( + executeQueryWithContentType( shouldQueryBroker ? brokerEndpoint : routerEndpoint, MediaType.APPLICATION_FORM_URLENCODED, null, @@ -200,7 +200,7 @@ public void testEmptyQuery_UrlEncoded(boolean shouldQueryBroker) @FieldSource("SHOULD_USE_BROKER_TO_QUERY") public void testBlankQuery_TextPlain(boolean shouldQueryBroker) { - executeQuery( + executeQueryWithContentType( shouldQueryBroker ? 
brokerEndpoint : routerEndpoint, MediaType.TEXT_PLAIN, " ", @@ -217,7 +217,7 @@ public void testBlankQuery_TextPlain(boolean shouldQueryBroker) @FieldSource("SHOULD_USE_BROKER_TO_QUERY") public void testEmptyQuery_JSON(boolean shouldQueryBroker) { - executeQuery( + executeQueryWithContentType( shouldQueryBroker ? brokerEndpoint : routerEndpoint, MediaType.APPLICATION_JSON, null, @@ -234,7 +234,7 @@ public void testEmptyQuery_JSON(boolean shouldQueryBroker) @FieldSource("SHOULD_USE_BROKER_TO_QUERY") public void testMultipleContentType_usesFirstOne(boolean shouldQueryBroker) { - executeQuery( + executeQueryWithContentType( shouldQueryBroker ? brokerEndpoint : routerEndpoint, MediaType.TEXT_PLAIN, "SELECT 1", diff --git a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/SystemTableQueryTest.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/SystemTableQueryTest.java new file mode 100644 index 000000000000..faa16a43f89f --- /dev/null +++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/SystemTableQueryTest.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.testing.embedded.query; + + +import org.apache.druid.java.util.common.StringUtils; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +/** + * Integration test for system table queries. + * In this test we're using deterministic table names to avoid flaky behavior of the test. + */ +public class SystemTableQueryTest extends QueryTestBase +{ + private String testDataSourceName; + + @Override + public void beforeAll() + { + testDataSourceName = ingestBasicData(); + } + + @Test + public void testSystemTableQueries_segmentsCount() + { + String query = StringUtils.format( + "SELECT datasource, count(*) \n" + + "FROM sys.segments \n" + + "WHERE datasource='%s' \n" + + "GROUP BY 1", + testDataSourceName + ); + + String result = cluster.callApi().runSql(query); + Assertions.assertEquals(StringUtils.format("%s,10", testDataSourceName), result); + } + + @Test + public void testSystemTableQueries_serverTypes() + { + String query = "SELECT server_type FROM sys.servers WHERE tier IS NOT NULL AND server_type <> 'indexer'"; + Assertions.assertEquals("historical", cluster.callApi().runSql(query)); + } +} diff --git a/integration-tests/docker/docker-compose.query-error-test.yml b/integration-tests/docker/docker-compose.query-error-test.yml deleted file mode 100644 index c7fed645ce55..000000000000 --- a/integration-tests/docker/docker-compose.query-error-test.yml +++ /dev/null @@ -1,107 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -version: "2.2" -services: - druid-zookeeper-kafka: - extends: - file: docker-compose.base.yml - service: druid-zookeeper-kafka - - druid-metadata-storage: - extends: - file: docker-compose.base.yml - service: druid-metadata-storage - environment: - - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP} - depends_on: - - druid-zookeeper-kafka - - druid-overlord: - extends: - file: docker-compose.base.yml - service: druid-overlord - environment: - - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP} - depends_on: - druid-metadata-storage: - condition: service_healthy - druid-zookeeper-kafka: - condition: service_started - - druid-coordinator: - extends: - file: docker-compose.base.yml - service: druid-coordinator - environment: - - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP} - depends_on: - druid-overlord: - condition: service_started - druid-metadata-storage: - condition: service_healthy - druid-zookeeper-kafka: - condition: service_started - - druid-broker: - extends: - file: docker-compose.base.yml - service: druid-broker - environment: - - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP} - depends_on: - - druid-zookeeper-kafka - - druid-historical-for-query-error-test - - druid-router: - extends: - file: docker-compose.base.yml - service: druid-router - environment: - - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP} - depends_on: - - druid-zookeeper-kafka - - druid-coordinator - - druid-broker - - druid-historical-for-query-error-test: - image: druid/cluster - container_name: druid-historical-for-query-error-test - networks: - druid-it-net: - ipv4_address: 172.172.172.14 - ports: - - 8086:8083 #8084 is used by mono service on GHA runners - - 8284:8283 - - 5010:5007 - privileged: true - volumes: - - ${HOME}/shared:/shared - - ./service-supervisords/druid.conf:/usr/lib/druid/conf/druid.conf - env_file: - - ./environment-configs/common - - ./environment-configs/historical-for-query-error-test - - ${OVERRIDE_ENV} - environment: - - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP} - depends_on: - - druid-zookeeper-kafka - -networks: - druid-it-net: - name: druid-it-net - ipam: - config: - - subnet: 172.172.172.0/24 \ No newline at end of file diff --git a/integration-tests/docker/docker-compose.query-retry-test.yml b/integration-tests/docker/docker-compose.query-retry-test.yml deleted file mode 100644 index 0a5c8bfc21bc..000000000000 --- a/integration-tests/docker/docker-compose.query-retry-test.yml +++ /dev/null @@ -1,107 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. 
-# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -version: "2.2" -services: - druid-zookeeper-kafka: - extends: - file: docker-compose.base.yml - service: druid-zookeeper-kafka - - druid-metadata-storage: - extends: - file: docker-compose.base.yml - service: druid-metadata-storage - environment: - - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP} - depends_on: - - druid-zookeeper-kafka - - druid-overlord: - extends: - file: docker-compose.base.yml - service: druid-overlord - environment: - - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP} - depends_on: - druid-metadata-storage: - condition: service_healthy - druid-zookeeper-kafka: - condition: service_started - - druid-coordinator: - extends: - file: docker-compose.base.yml - service: druid-coordinator - environment: - - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP} - depends_on: - druid-overlord: - condition: service_started - druid-metadata-storage: - condition: service_healthy - druid-zookeeper-kafka: - condition: service_started - - druid-broker: - extends: - file: docker-compose.base.yml - service: druid-broker - environment: - - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP} - depends_on: - - druid-zookeeper-kafka - - druid-historical-for-query-retry-test - - druid-router: - extends: - file: docker-compose.base.yml - service: druid-router - environment: - - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP} - depends_on: - - druid-zookeeper-kafka - - druid-coordinator - - druid-broker - - druid-historical-for-query-retry-test: - image: druid/cluster - container_name: druid-historical-for-query-retry-test - networks: - druid-it-net: - ipv4_address: 172.172.172.14 - ports: - - 8086:8083 #8084 is used by mono service on GHA runners - - 8284:8283 - - 5010:5007 - privileged: true - volumes: - - ${HOME}/shared:/shared - - ./service-supervisords/druid.conf:/usr/lib/druid/conf/druid.conf - env_file: - - ./environment-configs/common - - ./environment-configs/historical-for-query-error-test - - ${OVERRIDE_ENV} - environment: - - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP} - depends_on: - - druid-zookeeper-kafka - -networks: - druid-it-net: - name: druid-it-net - ipam: - config: - - subnet: 172.172.172.0/24 \ No newline at end of file diff --git a/integration-tests/docker/druid.sh b/integration-tests/docker/druid.sh index 129e3722ee60..6d666af1a17e 100755 --- a/integration-tests/docker/druid.sh +++ b/integration-tests/docker/druid.sh @@ -22,8 +22,8 @@ getConfPath() cluster_conf_base=/tmp/conf/druid/cluster case "$1" in _common) echo $cluster_conf_base/_common ;; - historical) echo $cluster_conf_base/data/historical ;; historical-for-query-error-test) echo $cluster_conf_base/data/historical ;; + historical) echo $cluster_conf_base/data/historical ;; middleManager) echo $cluster_conf_base/data/middleManager ;; indexer) echo $cluster_conf_base/data/indexer ;; coordinator) echo $cluster_conf_base/master/coordinator ;; @@ -103,7 +103,7 @@ setupData() 
# The "query" and "security" test groups require data to be setup before running the tests. # In particular, they requires segments to be download from a pre-existing s3 bucket. # This is done by using the loadSpec put into metadatastore and s3 credientials set below. - if [ "$DRUID_INTEGRATION_TEST_GROUP" = "query" ] || [ "$DRUID_INTEGRATION_TEST_GROUP" = "query-retry" ] || [ "$DRUID_INTEGRATION_TEST_GROUP" = "query-error" ] || [ "$DRUID_INTEGRATION_TEST_GROUP" = "security" ] || [ "$DRUID_INTEGRATION_TEST_GROUP" = "upgrade" ] || [ "$DRUID_INTEGRATION_TEST_GROUP" = "centralized-datasource-schema" ] || [ "$DRUID_INTEGRATION_TEST_GROUP" = "cds-task-schema-publish-disabled" ] || [ "$DRUID_INTEGRATION_TEST_GROUP" = "cds-coordinator-metadata-query-disabled" ]; then + if [ "$DRUID_INTEGRATION_TEST_GROUP" = "query" ] || [ "$DRUID_INTEGRATION_TEST_GROUP" = "security" ] || [ "$DRUID_INTEGRATION_TEST_GROUP" = "upgrade" ] || [ "$DRUID_INTEGRATION_TEST_GROUP" = "centralized-datasource-schema" ] || [ "$DRUID_INTEGRATION_TEST_GROUP" = "cds-task-schema-publish-disabled" ] || [ "$DRUID_INTEGRATION_TEST_GROUP" = "cds-coordinator-metadata-query-disabled" ]; then cat /test-data/${DRUID_INTEGRATION_TEST_GROUP}-sample-data.sql | mysql -u root druid fi diff --git a/integration-tests/docker/environment-configs/historical-for-query-error-test b/integration-tests/docker/environment-configs/historical-for-query-error-test index b2eb3460d4f1..da842e5374ad 100644 --- a/integration-tests/docker/environment-configs/historical-for-query-error-test +++ b/integration-tests/docker/environment-configs/historical-for-query-error-test @@ -1,3 +1,4 @@ +@@ -1,32 +0,0 @@ # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file @@ -29,4 +30,4 @@ druid_processing_numThreads=2 druid_query_groupBy_maxOnDiskStorage=300000000 druid_segmentCache_locations=[{"path":"/shared/druid/indexCache-query-error-test","maxSize":5000000000}] druid_auth_basic_common_cacheDirectory=/tmp/authCache/historical-query-error-test -druid_server_https_crlPath=/tls/revocations.crl +druid_server_https_crlPath=/tls/revocations.crl \ No newline at end of file diff --git a/integration-tests/docker/test-data/query-error-sample-data.sql b/integration-tests/docker/test-data/query-error-sample-data.sql deleted file mode 100644 index abe0f115189b..000000000000 --- a/integration-tests/docker/test-data/query-error-sample-data.sql +++ /dev/null @@ -1,20 +0,0 @@ --- Licensed to the Apache Software Foundation (ASF) under one or more --- contributor license agreements. See the NOTICE file distributed with --- this work for additional information regarding copyright ownership. --- The ASF licenses this file to You under the Apache License, Version 2.0 --- (the "License"); you may not use this file except in compliance with --- the License. You may obtain a copy of the License at --- --- http://www.apache.org/licenses/LICENSE-2.0 --- --- Unless required by applicable law or agreed to in writing, software --- distributed under the License is distributed on an "AS IS" BASIS, --- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. --- See the License for the specific language governing permissions and --- limitations under the License. 
- -INSERT INTO druid_segments (id,dataSource,created_date,start,end,partitioned,version,used,payload,used_status_last_updated) VALUES ('twitterstream_2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z_2013-01-02T04:13:41.980Z_v9','twitterstream','2013-05-13T01:08:18.192Z','2013-01-01T00:00:00.000Z','2013-01-02T00:00:00.000Z',0,'2013-01-02T04:13:41.980Z_v9',1,'{\"dataSource\":\"twitterstream\",\"interval\":\"2013-01-01T00:00:00.000Z/2013-01-02T00:00:00.000Z\",\"version\":\"2013-01-02T04:13:41.980Z_v9\",\"loadSpec\":{\"type\":\"s3_zip\",\"bucket\":\"static.druid.io\",\"key\":\"data/segments/twitterstream/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/2013-01-02T04:13:41.980Z_v9/0/index.zip\"},\"dimensions\":\"has_links,first_hashtag,user_time_zone,user_location,has_mention,user_lang,rt_name,user_name,is_retweet,is_viral,has_geo,url_domain,user_mention_name,reply_to_name\",\"metrics\":\"count,tweet_length,num_followers,num_links,num_mentions,num_hashtags,num_favorites,user_total_tweets\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":445235220,\"identifier\":\"twitterstream_2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z_2013-01-02T04:13:41.980Z_v9\"}','1970-01-01T00:00:00.000Z'); -INSERT INTO druid_segments (id,dataSource,created_date,start,end,partitioned,version,used,payload,used_status_last_updated) VALUES ('twitterstream_2013-01-02T00:00:00.000Z_2013-01-03T00:00:00.000Z_2013-01-03T03:44:58.791Z_v9','twitterstream','2013-05-13T00:03:28.640Z','2013-01-02T00:00:00.000Z','2013-01-03T00:00:00.000Z',0,'2013-01-03T03:44:58.791Z_v9',1,'{\"dataSource\":\"twitterstream\",\"interval\":\"2013-01-02T00:00:00.000Z/2013-01-03T00:00:00.000Z\",\"version\":\"2013-01-03T03:44:58.791Z_v9\",\"loadSpec\":{\"type\":\"s3_zip\",\"bucket\":\"static.druid.io\",\"key\":\"data/segments/twitterstream/2013-01-02T00:00:00.000Z_2013-01-03T00:00:00.000Z/2013-01-03T03:44:58.791Z_v9/0/index.zip\"},\"dimensions\":\"has_links,first_hashtag,user_time_zone,user_location,has_mention,user_lang,rt_name,user_name,is_retweet,is_viral,has_geo,url_domain,user_mention_name,reply_to_name\",\"metrics\":\"count,tweet_length,num_followers,num_links,num_mentions,num_hashtags,num_favorites,user_total_tweets\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":435325540,\"identifier\":\"twitterstream_2013-01-02T00:00:00.000Z_2013-01-03T00:00:00.000Z_2013-01-03T03:44:58.791Z_v9\"}','1970-01-01T00:00:00.000Z'); -INSERT INTO druid_segments (id,dataSource,created_date,start,end,partitioned,version,used,payload,used_status_last_updated) VALUES 
('twitterstream_2013-01-03T00:00:00.000Z_2013-01-04T00:00:00.000Z_2013-01-04T04:09:13.590Z_v9','twitterstream','2013-05-13T00:03:48.807Z','2013-01-03T00:00:00.000Z','2013-01-04T00:00:00.000Z',0,'2013-01-04T04:09:13.590Z_v9',1,'{\"dataSource\":\"twitterstream\",\"interval\":\"2013-01-03T00:00:00.000Z/2013-01-04T00:00:00.000Z\",\"version\":\"2013-01-04T04:09:13.590Z_v9\",\"loadSpec\":{\"type\":\"s3_zip\",\"bucket\":\"static.druid.io\",\"key\":\"data/segments/twitterstream/2013-01-03T00:00:00.000Z_2013-01-04T00:00:00.000Z/2013-01-04T04:09:13.590Z_v9/0/index.zip\"},\"dimensions\":\"has_links,first_hashtag,user_time_zone,user_location,has_mention,user_lang,rt_name,user_name,is_retweet,is_viral,has_geo,url_domain,user_mention_name,reply_to_name\",\"metrics\":\"count,tweet_length,num_followers,num_links,num_mentions,num_hashtags,num_favorites,user_total_tweets\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":411651320,\"identifier\":\"twitterstream_2013-01-03T00:00:00.000Z_2013-01-04T00:00:00.000Z_2013-01-04T04:09:13.590Z_v9\"}','1970-01-01T00:00:00.000Z'); -INSERT INTO druid_segments (id,dataSource,created_date,start,end,partitioned,version,used,payload,used_status_last_updated) VALUES ('wikipedia_editstream_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9','wikipedia_editstream','2013-03-15T20:49:52.348Z','2012-12-29T00:00:00.000Z','2013-01-10T08:00:00.000Z',0,'2013-01-10T08:13:47.830Z_v9',1,'{\"dataSource\":\"wikipedia_editstream\",\"interval\":\"2012-12-29T00:00:00.000Z/2013-01-10T08:00:00.000Z\",\"version\":\"2013-01-10T08:13:47.830Z_v9\",\"loadSpec\":{\"type\":\"s3_zip\",\"bucket\":\"static.druid.io\",\"key\":\"data/segments/wikipedia_editstream/2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z/2013-01-10T08:13:47.830Z_v9/0/index.zip\"},\"dimensions\":\"anonymous,area_code,city,continent_code,country_name,dma_code,geo,language,namespace,network,newpage,page,postal_code,region_lookup,robot,unpatrolled,user\",\"metrics\":\"added,count,deleted,delta,delta_hist,unique_users,variation\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":446027801,\"identifier\":\"wikipedia_editstream_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\"}','1970-01-01T00:00:00.000Z'); -INSERT INTO druid_segments (id, dataSource, created_date, start, end, partitioned, version, used, payload,used_status_last_updated) VALUES ('wikipedia_2013-08-01T00:00:00.000Z_2013-08-02T00:00:00.000Z_2013-08-08T21:22:48.989Z', 'wikipedia', '2013-08-08T21:26:23.799Z', '2013-08-01T00:00:00.000Z', '2013-08-02T00:00:00.000Z', '0', '2013-08-08T21:22:48.989Z', '1', '{\"dataSource\":\"wikipedia\",\"interval\":\"2013-08-01T00:00:00.000Z/2013-08-02T00:00:00.000Z\",\"version\":\"2013-08-08T21:22:48.989Z\",\"loadSpec\":{\"type\":\"s3_zip\",\"bucket\":\"static.druid.io\",\"key\":\"data/segments/wikipedia/20130801T000000.000Z_20130802T000000.000Z/2013-08-08T21_22_48.989Z/0/index.zip\"},\"dimensions\":\"dma_code,continent_code,geo,area_code,robot,country_name,network,city,namespace,anonymous,unpatrolled,page,postal_code,language,newpage,user,region_lookup\",\"metrics\":\"count,delta,variation,added,deleted\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":24664730,\"identifier\":\"wikipedia_2013-08-01T00:00:00.000Z_2013-08-02T00:00:00.000Z_2013-08-08T21:22:48.989Z\"}','1970-01-01T00:00:00.000Z'); diff --git a/integration-tests/docker/test-data/query-retry-sample-data.sql b/integration-tests/docker/test-data/query-retry-sample-data.sql deleted file 
mode 100644 index abe0f115189b..000000000000 --- a/integration-tests/docker/test-data/query-retry-sample-data.sql +++ /dev/null @@ -1,20 +0,0 @@ --- Licensed to the Apache Software Foundation (ASF) under one or more --- contributor license agreements. See the NOTICE file distributed with --- this work for additional information regarding copyright ownership. --- The ASF licenses this file to You under the Apache License, Version 2.0 --- (the "License"); you may not use this file except in compliance with --- the License. You may obtain a copy of the License at --- --- http://www.apache.org/licenses/LICENSE-2.0 --- --- Unless required by applicable law or agreed to in writing, software --- distributed under the License is distributed on an "AS IS" BASIS, --- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. --- See the License for the specific language governing permissions and --- limitations under the License. - -INSERT INTO druid_segments (id,dataSource,created_date,start,end,partitioned,version,used,payload,used_status_last_updated) VALUES ('twitterstream_2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z_2013-01-02T04:13:41.980Z_v9','twitterstream','2013-05-13T01:08:18.192Z','2013-01-01T00:00:00.000Z','2013-01-02T00:00:00.000Z',0,'2013-01-02T04:13:41.980Z_v9',1,'{\"dataSource\":\"twitterstream\",\"interval\":\"2013-01-01T00:00:00.000Z/2013-01-02T00:00:00.000Z\",\"version\":\"2013-01-02T04:13:41.980Z_v9\",\"loadSpec\":{\"type\":\"s3_zip\",\"bucket\":\"static.druid.io\",\"key\":\"data/segments/twitterstream/2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z/2013-01-02T04:13:41.980Z_v9/0/index.zip\"},\"dimensions\":\"has_links,first_hashtag,user_time_zone,user_location,has_mention,user_lang,rt_name,user_name,is_retweet,is_viral,has_geo,url_domain,user_mention_name,reply_to_name\",\"metrics\":\"count,tweet_length,num_followers,num_links,num_mentions,num_hashtags,num_favorites,user_total_tweets\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":445235220,\"identifier\":\"twitterstream_2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z_2013-01-02T04:13:41.980Z_v9\"}','1970-01-01T00:00:00.000Z'); -INSERT INTO druid_segments (id,dataSource,created_date,start,end,partitioned,version,used,payload,used_status_last_updated) VALUES ('twitterstream_2013-01-02T00:00:00.000Z_2013-01-03T00:00:00.000Z_2013-01-03T03:44:58.791Z_v9','twitterstream','2013-05-13T00:03:28.640Z','2013-01-02T00:00:00.000Z','2013-01-03T00:00:00.000Z',0,'2013-01-03T03:44:58.791Z_v9',1,'{\"dataSource\":\"twitterstream\",\"interval\":\"2013-01-02T00:00:00.000Z/2013-01-03T00:00:00.000Z\",\"version\":\"2013-01-03T03:44:58.791Z_v9\",\"loadSpec\":{\"type\":\"s3_zip\",\"bucket\":\"static.druid.io\",\"key\":\"data/segments/twitterstream/2013-01-02T00:00:00.000Z_2013-01-03T00:00:00.000Z/2013-01-03T03:44:58.791Z_v9/0/index.zip\"},\"dimensions\":\"has_links,first_hashtag,user_time_zone,user_location,has_mention,user_lang,rt_name,user_name,is_retweet,is_viral,has_geo,url_domain,user_mention_name,reply_to_name\",\"metrics\":\"count,tweet_length,num_followers,num_links,num_mentions,num_hashtags,num_favorites,user_total_tweets\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":435325540,\"identifier\":\"twitterstream_2013-01-02T00:00:00.000Z_2013-01-03T00:00:00.000Z_2013-01-03T03:44:58.791Z_v9\"}','1970-01-01T00:00:00.000Z'); -INSERT INTO druid_segments (id,dataSource,created_date,start,end,partitioned,version,used,payload,used_status_last_updated) VALUES 
('twitterstream_2013-01-03T00:00:00.000Z_2013-01-04T00:00:00.000Z_2013-01-04T04:09:13.590Z_v9','twitterstream','2013-05-13T00:03:48.807Z','2013-01-03T00:00:00.000Z','2013-01-04T00:00:00.000Z',0,'2013-01-04T04:09:13.590Z_v9',1,'{\"dataSource\":\"twitterstream\",\"interval\":\"2013-01-03T00:00:00.000Z/2013-01-04T00:00:00.000Z\",\"version\":\"2013-01-04T04:09:13.590Z_v9\",\"loadSpec\":{\"type\":\"s3_zip\",\"bucket\":\"static.druid.io\",\"key\":\"data/segments/twitterstream/2013-01-03T00:00:00.000Z_2013-01-04T00:00:00.000Z/2013-01-04T04:09:13.590Z_v9/0/index.zip\"},\"dimensions\":\"has_links,first_hashtag,user_time_zone,user_location,has_mention,user_lang,rt_name,user_name,is_retweet,is_viral,has_geo,url_domain,user_mention_name,reply_to_name\",\"metrics\":\"count,tweet_length,num_followers,num_links,num_mentions,num_hashtags,num_favorites,user_total_tweets\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":411651320,\"identifier\":\"twitterstream_2013-01-03T00:00:00.000Z_2013-01-04T00:00:00.000Z_2013-01-04T04:09:13.590Z_v9\"}','1970-01-01T00:00:00.000Z');
-INSERT INTO druid_segments (id,dataSource,created_date,start,end,partitioned,version,used,payload,used_status_last_updated) VALUES ('wikipedia_editstream_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9','wikipedia_editstream','2013-03-15T20:49:52.348Z','2012-12-29T00:00:00.000Z','2013-01-10T08:00:00.000Z',0,'2013-01-10T08:13:47.830Z_v9',1,'{\"dataSource\":\"wikipedia_editstream\",\"interval\":\"2012-12-29T00:00:00.000Z/2013-01-10T08:00:00.000Z\",\"version\":\"2013-01-10T08:13:47.830Z_v9\",\"loadSpec\":{\"type\":\"s3_zip\",\"bucket\":\"static.druid.io\",\"key\":\"data/segments/wikipedia_editstream/2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z/2013-01-10T08:13:47.830Z_v9/0/index.zip\"},\"dimensions\":\"anonymous,area_code,city,continent_code,country_name,dma_code,geo,language,namespace,network,newpage,page,postal_code,region_lookup,robot,unpatrolled,user\",\"metrics\":\"added,count,deleted,delta,delta_hist,unique_users,variation\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":446027801,\"identifier\":\"wikipedia_editstream_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9\"}','1970-01-01T00:00:00.000Z');
-INSERT INTO druid_segments (id, dataSource, created_date, start, end, partitioned, version, used, payload,used_status_last_updated) VALUES ('wikipedia_2013-08-01T00:00:00.000Z_2013-08-02T00:00:00.000Z_2013-08-08T21:22:48.989Z', 'wikipedia', '2013-08-08T21:26:23.799Z', '2013-08-01T00:00:00.000Z', '2013-08-02T00:00:00.000Z', '0', '2013-08-08T21:22:48.989Z', '1', '{\"dataSource\":\"wikipedia\",\"interval\":\"2013-08-01T00:00:00.000Z/2013-08-02T00:00:00.000Z\",\"version\":\"2013-08-08T21:22:48.989Z\",\"loadSpec\":{\"type\":\"s3_zip\",\"bucket\":\"static.druid.io\",\"key\":\"data/segments/wikipedia/20130801T000000.000Z_20130802T000000.000Z/2013-08-08T21_22_48.989Z/0/index.zip\"},\"dimensions\":\"dma_code,continent_code,geo,area_code,robot,country_name,network,city,namespace,anonymous,unpatrolled,page,postal_code,language,newpage,user,region_lookup\",\"metrics\":\"count,delta,variation,added,deleted\",\"shardSpec\":{\"type\":\"none\"},\"binaryVersion\":9,\"size\":24664730,\"identifier\":\"wikipedia_2013-08-01T00:00:00.000Z_2013-08-02T00:00:00.000Z_2013-08-08T21:22:48.989Z\"}','1970-01-01T00:00:00.000Z');
diff --git a/integration-tests/script/docker_compose_args.sh b/integration-tests/script/docker_compose_args.sh
index fff93605e97b..87475239e623 100644
--- a/integration-tests/script/docker_compose_args.sh
+++ b/integration-tests/script/docker_compose_args.sh
@@ -28,8 +28,8 @@ getComposeArgs()
   fi
   if [ "$DRUID_INTEGRATION_TEST_INDEXER" = "indexer" ]
   then
-    # Sanity check: cannot combine CliIndexer tests with security, query-retry tests
-    if [ "$DRUID_INTEGRATION_TEST_GROUP" = "security" ] || [ "$DRUID_INTEGRATION_TEST_GROUP" = "query-retry" ] || [ "$DRUID_INTEGRATION_TEST_GROUP" = "query-error" ]
+    # Sanity check: cannot combine CliIndexer tests with security tests
+    if [ "$DRUID_INTEGRATION_TEST_GROUP" = "security" ]
     then
       echo "Cannot run test group '$DRUID_INTEGRATION_TEST_GROUP' with CliIndexer"
       exit 1
@@ -41,16 +41,6 @@ getComposeArgs()
     then
       # default + additional druid router (custom-check-tls, permissive-tls, no-client-auth-tls)
       echo "-f ${DOCKERDIR}/docker-compose.yml -f ${DOCKERDIR}/docker-compose.security.yml"
-    elif [ "$DRUID_INTEGRATION_TEST_GROUP" = "query-retry" ]
-    then
-      # default + additional historical modified for query retry test
-      # See CliHistoricalForQueryRetryTest.
-      echo "-f ${DOCKERDIR}/docker-compose.query-retry-test.yml"
-    elif [ "$DRUID_INTEGRATION_TEST_GROUP" = "query-error" ]
-    then
-      # default + additional historical modified for query error test
-      # See CliHistoricalForQueryRetryTest.
-      echo "-f ${DOCKERDIR}/docker-compose.query-error-test.yml"
     elif [ "$DRUID_INTEGRATION_TEST_GROUP" = "kinesis-data-format" ]
     then
       # default + with override config + schema registry container
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/TestNGGroup.java b/integration-tests/src/test/java/org/apache/druid/tests/TestNGGroup.java
index 21c3fc7a76bf..efdd26de20a6 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/TestNGGroup.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/TestNGGroup.java
@@ -40,10 +40,6 @@ public class TestNGGroup
    */
  public static final String QUERY = "query";

-  public static final String QUERY_RETRY = "query-retry";
-
-  public static final String QUERY_ERROR = "query-error";
-
  /**
   * This group can only be run individually using -Dgroups=security since it requires specific test data setup.
   */
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/query/ITSqlCancelTest.java b/integration-tests/src/test/java/org/apache/druid/tests/query/ITSqlCancelTest.java
deleted file mode 100644
index 8385f643e526..000000000000
--- a/integration-tests/src/test/java/org/apache/druid/tests/query/ITSqlCancelTest.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.tests.query;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.ImmutableMap;
-import com.google.inject.Inject;
-import org.apache.druid.java.util.common.ISE;
-import org.apache.druid.java.util.common.RE;
-import org.apache.druid.java.util.http.client.response.StatusResponseHolder;
-import org.apache.druid.query.BaseQuery;
-import org.apache.druid.query.QueryException;
-import org.apache.druid.sql.http.SqlQuery;
-import org.apache.druid.testing.clients.SqlResourceTestClient;
-import org.apache.druid.testing.guice.DruidTestModuleFactory;
-import org.apache.druid.testing.tools.IntegrationTestingConfig;
-import org.apache.druid.testing.utils.DataLoaderHelper;
-import org.apache.druid.testing.utils.SqlTestQueryHelper;
-import org.apache.druid.tests.TestNGGroup;
-import org.jboss.netty.handler.codec.http.HttpResponseStatus;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
-
-@Test(groups = {TestNGGroup.QUERY, TestNGGroup.CENTRALIZED_DATASOURCE_SCHEMA})
-@Guice(moduleFactory = DruidTestModuleFactory.class)
-public class ITSqlCancelTest
-{
-  private static final String WIKIPEDIA_DATA_SOURCE = "wikipedia_editstream";
-
-  /**
-   * This query will run exactly for 15 seconds.
-   */
-  private static final String QUERY
-      = "SELECT sleep(CASE WHEN added > 0 THEN 1 ELSE 0 END) FROM wikipedia_editstream WHERE added > 0 LIMIT 15";
-
-  private static final int NUM_QUERIES = 3;
-
-  @Inject
-  private DataLoaderHelper dataLoaderHelper;
-  @Inject
-  private SqlTestQueryHelper sqlHelper;
-  @Inject
-  private SqlResourceTestClient sqlClient;
-  @Inject
-  private IntegrationTestingConfig config;
-  @Inject
-  private ObjectMapper jsonMapper;
-
-  @BeforeMethod
-  public void before()
-  {
-    // ensure that wikipedia segments are loaded completely
-    dataLoaderHelper.waitUntilDatasourceIsReady(WIKIPEDIA_DATA_SOURCE);
-  }
-
-  @Test
-  public void testCancelValidQuery() throws Exception
-  {
-    final String queryId = "sql-cancel-test";
-    final List<Future<StatusResponseHolder>> queryResponseFutures = new ArrayList<>();
-    for (int i = 0; i < NUM_QUERIES; i++) {
-      queryResponseFutures.add(
-          sqlClient.queryAsync(
-              sqlHelper.getQueryURL(config.getRouterUrl()),
-              new SqlQuery(QUERY, null, false, false, false, ImmutableMap.of(BaseQuery.SQL_QUERY_ID, queryId), null)
-          )
-      );
-    }
-
-    // Wait until the sqlLifecycle is authorized and registered
-    Thread.sleep(1000);
-    final HttpResponseStatus responseStatus = sqlClient.cancelQuery(
-        sqlHelper.getCancelUrl(config.getRouterUrl(), queryId),
-        1000
-    );
-    if (!responseStatus.equals(HttpResponseStatus.ACCEPTED)) {
-      throw new RE("Failed to cancel query [%s]. Response code was [%s]", queryId, responseStatus);
-    }
-
-    for (Future<StatusResponseHolder> queryResponseFuture : queryResponseFutures) {
-      final StatusResponseHolder queryResponse = queryResponseFuture.get(1, TimeUnit.SECONDS);
-      if (!queryResponse.getStatus().equals(HttpResponseStatus.INTERNAL_SERVER_ERROR)) {
-        throw new ISE("Query is not canceled after cancel request");
-      }
-      QueryException queryException = jsonMapper.readValue(queryResponse.getContent(), QueryException.class);
-      if (!"Query cancelled".equals(queryException.getErrorCode())) {
-        throw new ISE(
-            "Expected error code [%s], actual [%s]",
-            "Query cancelled",
-            queryException.getErrorCode()
-        );
-      }
-    }
-  }
-
-  @Test
-  public void testCancelInvalidQuery() throws Exception
-  {
-    final Future<StatusResponseHolder> queryResponseFuture = sqlClient
-        .queryAsync(
-            sqlHelper.getQueryURL(config.getRouterUrl()),
-            new SqlQuery(QUERY, null, false, false, false, ImmutableMap.of(BaseQuery.SQL_QUERY_ID, "validId"), null)
-        );
-
-    // Wait until the sqlLifecycle is authorized and registered
-    Thread.sleep(1000);
-    final HttpResponseStatus responseStatus = sqlClient.cancelQuery(
-        sqlHelper.getCancelUrl(config.getRouterUrl(), "invalidId"),
-        1000
-    );
-    if (!responseStatus.equals(HttpResponseStatus.NOT_FOUND)) {
-      throw new RE("Expected http response [%s], actual response [%s]", HttpResponseStatus.NOT_FOUND, responseStatus);
-    }
-
-    final StatusResponseHolder queryResponse = queryResponseFuture.get(30, TimeUnit.SECONDS);
-    if (!queryResponse.getStatus().equals(HttpResponseStatus.OK)) {
-      throw new ISE(
-          "Cancel request failed with status[%s] and content[%s]",
-          queryResponse.getStatus(),
-          queryResponse.getContent()
-      );
-    }
-  }
-}
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/query/ITSystemTableQueryTest.java b/integration-tests/src/test/java/org/apache/druid/tests/query/ITSystemTableQueryTest.java
deleted file mode 100644
index 5cf022eef376..000000000000
--- a/integration-tests/src/test/java/org/apache/druid/tests/query/ITSystemTableQueryTest.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.tests.query;
-
-import com.google.inject.Inject;
-import org.apache.druid.testing.guice.DruidTestModuleFactory;
-import org.apache.druid.testing.tools.IntegrationTestingConfig;
-import org.apache.druid.testing.utils.DataLoaderHelper;
-import org.apache.druid.testing.utils.SqlTestQueryHelper;
-import org.apache.druid.tests.TestNGGroup;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-@Test(groups = {TestNGGroup.QUERY, TestNGGroup.CENTRALIZED_DATASOURCE_SCHEMA})
-@Guice(moduleFactory = DruidTestModuleFactory.class)
-public class ITSystemTableQueryTest
-{
-  private static final String WIKIPEDIA_DATA_SOURCE = "wikipedia_editstream";
-  private static final String TWITTER_DATA_SOURCE = "twitterstream";
-  private static final String SYSTEM_QUERIES_RESOURCE = "/queries/sys_queries.json";
-
-  @Inject
-  DataLoaderHelper dataLoaderHelper;
-  @Inject
-  private SqlTestQueryHelper queryHelper;
-  @Inject
-  IntegrationTestingConfig config;
-
-  @BeforeMethod
-  public void before()
-  {
-    // ensure that wikipedia segments are loaded completely
-    dataLoaderHelper.waitUntilDatasourceIsReady(WIKIPEDIA_DATA_SOURCE);
-
-    // ensure that the twitter segments are loaded completely
-    dataLoaderHelper.waitUntilDatasourceIsReady(TWITTER_DATA_SOURCE);
-  }
-
-  @Test
-  public void testSystemTableQueries()
-  {
-    try {
-      this.queryHelper.testQueriesFromFile(SYSTEM_QUERIES_RESOURCE);
-    }
-    catch (Exception e) {
-      throw new RuntimeException(e);
-    }
-  }
-}
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/query/ITTwitterQueryTest.java b/integration-tests/src/test/java/org/apache/druid/tests/query/ITTwitterQueryTest.java
deleted file mode 100644
index ebbaf1795e69..000000000000
--- a/integration-tests/src/test/java/org/apache/druid/tests/query/ITTwitterQueryTest.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.tests.query;
-
-import com.google.inject.Inject;
-import org.apache.druid.testing.clients.CoordinatorResourceTestClient;
-import org.apache.druid.testing.guice.DruidTestModuleFactory;
-import org.apache.druid.testing.tools.ITRetryUtil;
-import org.apache.druid.testing.utils.TestQueryHelper;
-import org.apache.druid.tests.TestNGGroup;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-@Test(groups = {TestNGGroup.QUERY, TestNGGroup.CENTRALIZED_DATASOURCE_SCHEMA})
-@Guice(moduleFactory = DruidTestModuleFactory.class)
-public class ITTwitterQueryTest
-{
-  private static final String TWITTER_DATA_SOURCE = "twitterstream";
-  private static final String TWITTER_QUERIES_RESOURCE = "/queries/twitterstream_queries.json";
-  @Inject
-  CoordinatorResourceTestClient coordinatorClient;
-  @Inject
-  private TestQueryHelper queryHelper;
-
-  @BeforeMethod
-  public void before()
-  {
-    // ensure that the twitter segments are loaded completely
-    ITRetryUtil.retryUntilTrue(
-        () -> coordinatorClient.areSegmentsLoaded(TWITTER_DATA_SOURCE), "twitter segment load"
-    );
-  }
-
-  @Test
-  public void testTwitterQueriesFromFile() throws Exception
-  {
-    queryHelper.testQueriesFromFile(TWITTER_QUERIES_RESOURCE);
-  }
-
-}
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/query/ITWikipediaQueryTest.java b/integration-tests/src/test/java/org/apache/druid/tests/query/ITWikipediaQueryTest.java
index 18d29922cd5f..4bb4653877d1 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/query/ITWikipediaQueryTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/query/ITWikipediaQueryTest.java
@@ -84,9 +84,9 @@ public void before() throws Exception

  /**
   * A combination of request Content-Type and Accept HTTP header
-   * The first is Content-Type which can not be null while the 2nd is Accept which could be null
+   * The first is Content-Type, which cannot be null, while the second is Accept, which may be null
   * <p>
-   * When Accept is null, its value defaults to value of Content-Type
+   * When Accept is null, its value defaults to the value of Content-Type
   */
  @DataProvider
  public static Object[][] encodingCombination()
diff --git a/integration-tests/src/test/resources/queries/sys_queries.json b/integration-tests/src/test/resources/queries/sys_queries.json
deleted file mode 100644
index 284c60272a68..000000000000
--- a/integration-tests/src/test/resources/queries/sys_queries.json
+++ /dev/null
@@ -1,30 +0,0 @@
-[
-  {
-    "query": {
-      "query": "SELECT datasource, count(*) FROM sys.segments WHERE datasource='wikipedia_editstream' OR datasource='twitterstream' GROUP BY 1 "
-    },
-    "expectedResults": [
-      {
-        "datasource": "wikipedia_editstream",
-        "EXPR$1": 1
-      },
-      {
-        "datasource": "twitterstream",
-        "EXPR$1": 3
-      }
-    ]
-  },
-  {
-    "query": {
-      "query": "SELECT server_type FROM sys.servers WHERE tier IS NOT NULL AND server_type <> 'indexer'"
-    },
-    "expectedResults": [
-      {
-        "server_type":"historical"
-      },
-      {
-        "server_type":"broker"
-      }
-    ]
-  }
-]
\ No newline at end of file
diff --git a/integration-tests/src/test/resources/queries/twitterstream_queries.json b/integration-tests/src/test/resources/queries/twitterstream_queries.json
deleted file mode 100644
index c83d616d789d..000000000000
--- a/integration-tests/src/test/resources/queries/twitterstream_queries.json
+++ /dev/null
@@ -1,810 +0,0 @@
-[
-  {
-    "description": "timeseries, 2 aggs",
-    "query": {
-      "queryType": "timeseries",
-      "dataSource": "twitterstream",
-      "intervals": ["2013-01-01T00:00:00.000/2013-01-04T00:00:00.000"],
-      "granularity": "day",
-      "aggregations": [
-        {
-          "type": "doubleSum",
-          "name": "num_tweets",
-          "fieldName": "count"
-        },
-        {
-          "type": "doubleSum",
-          "name": "tweet_length",
-          "fieldName": "tweet_length"
-        }
-      ],
-      "context": {
-        "useCache": "true",
-        "populateCache": "true",
-        "timeout": 60000
-      }
-    },
-    "expectedResults": [
-      {
-        "timestamp": "2013-01-01T00:00:00.000Z",
-        "result": {
-          "tweet_length": 2.40241323E8,
-          "num_tweets": 3754028.0
-        }
-      },
-      {
-        "timestamp": "2013-01-02T00:00:00.000Z",
-        "result": {
-          "tweet_length": 2.46397801E8,
-          "num_tweets": 3799466.0
-        }
-      },
-      {
-        "timestamp": "2013-01-03T00:00:00.000Z",
-        "result": {
-          "tweet_length": 2.31365019E8,
-          "num_tweets": 3552419.0
-        }
-      }
-    ]
-  },
-  {
-    "description": "topN, 2 aggs, lexicographic",
-    "query": {
-      "queryType": "topN",
-      "dataSource": "twitterstream",
-      "intervals": ["2013-01-01T00:00:00.000/2013-01-04T00:00:00.000"],
-      "granularity": "day",
-      "aggregations": [
-        {
-          "type": "doubleSum",
-          "name": "num_tweets",
-          "fieldName": "count"
-        },
-        {
-          "type": "doubleSum",
-          "name": "tweet_length",
-          "fieldName": "tweet_length"
-        }
-      ],
-      "postAggregations": [
-        {
-          "type": "arithmetic",
-          "name": "avg_tweet_len",
-          "fn": "/",
-          "fields": [
-            {
-              "type": "fieldAccess",
-              "name": "tweet_length",
-              "fieldName": "tweet_length"
-            },
-            {
-              "type": "fieldAccess",
-              "name": "num_tweets",
-              "fieldName": "num_tweets"
-            }
-          ]
-        }
-      ],
-      "dimension": "user_name",
-      "metric": {
-        "type": "lexicographic"
-      },
-      "threshold": 2,
-      "context": {
-        "useCache": "true",
-        "populateCache": "true",
-        "timeout": 60000,
-        "useTopNMultiPassPooledQueryGranularity": "true"
-      }
-    },
-    "expectedResults": [
-      {
-        "timestamp": "2013-01-01T00:00:00.000Z",
-        "result": [
-          {
-            "user_name": "000000000000087",
-            "tweet_length": 14.0,
-            "num_tweets": 1.0,
-            "avg_tweet_len": 14.0
-          },
-          {
-            "user_name": "0000000000mghi",
-            "tweet_length": 291.0,
-            "num_tweets": 4.0,
-            "avg_tweet_len": 72.75
-          }
-        ]
-      },
-      {
-        "timestamp": "2013-01-02T00:00:00.000Z",
-        "result": [
-          {
-            "user_name": "000000000037",
-            "tweet_length": 13.0,
-            "num_tweets": 1.0,
-            "avg_tweet_len": 13.0
-          },
-          {
-            "user_name": "0000000000mghi",
-            "tweet_length": 21.0,
-            "num_tweets": 1.0,
-            "avg_tweet_len": 21.0
-          }
-        ]
-      },
-      {
-        "timestamp": "2013-01-03T00:00:00.000Z",
-        "result": [
-          {
-            "user_name": "000000007",
-            "tweet_length": 37.0,
-            "num_tweets": 1.0,
-            "avg_tweet_len": 37.0
-          },
-          {
-            "user_name": "00000000b",
-            "tweet_length": 119.0,
-            "num_tweets": 1.0,
-            "avg_tweet_len": 119.0
-          }
-        ]
-      }
-    ]
-  },
-  {
-    "description": "topN, 2 aggs",
-    "query": {
-      "queryType": "topN",
-      "dataSource": "twitterstream",
-      "intervals": ["2013-01-01T00:00:00.000/2013-01-04T00:00:00.000"],
-      "granularity": "day",
-      "aggregations": [
-        {
-          "type": "doubleSum",
-          "name": "num_tweets",
-          "fieldName": "count"
-        },
-        {
-          "type": "doubleSum",
-          "name": "tweet_length",
-          "fieldName": "tweet_length"
-        }
-      ],
-      "postAggregations": [
-        {
-          "type": "arithmetic",
-          "name": "avg_tweet_len",
-          "fn": "/",
-          "fields": [
-            {
-              "type": "fieldAccess",
-              "name": "tweet_length",
-              "fieldName": "tweet_length"
-            },
-            {
-              "type": "fieldAccess",
-              "name": "num_tweets",
-              "fieldName": "num_tweets"
-            }
-          ]
-        }
-      ],
-      "dimension": "user_name",
-      "metric": {
-        "type": "numeric",
-        "metric": "num_tweets"
-      },
-      "threshold": 2,
-      "context": {
-        "useCache": "true",
-        "populateCache": "true",
-        "timeout": 60000,
-        "useTopNMultiPassPooledQueryGranularity": "true"
-      }
-    },
-    "expectedResults": [
-      {
-        "timestamp": "2013-01-01T00:00:00.000Z",
-        "result": [
-          {
-            "user_name": "Favstar_Bot",
-            "tweet_length": 2002.0,
-            "num_tweets": 33.0,
-            "avg_tweet_len": 60.666666666666664
-          },
-          {
-            "user_name": "SportsAB",
-            "tweet_length": 1114.0,
-            "num_tweets": 26.0,
-            "avg_tweet_len": 42.84615384615385
-          }
-        ]
-      },
-      {
-        "timestamp": "2013-01-02T00:00:00.000Z",
-        "result": [
-          {
-            "user_name": "Favstar_Bot",
-            "tweet_length": 2185.0,
-            "num_tweets": 36.0,
-            "avg_tweet_len": 60.69444444444444
-          },
-          {
-            "user_name": "SportsAB",
-            "tweet_length": 1148.0,
-            "num_tweets": 23.0,
-            "avg_tweet_len": 49.91304347826087
-          }
-        ]
-      },
-      {
-        "timestamp": "2013-01-03T00:00:00.000Z",
-        "result": [
-          {
-            "user_name": "SportsAB",
-            "tweet_length": 882.0,
-            "num_tweets": 22.0,
-            "avg_tweet_len": 40.09090909090909
-          },
-          {
-            "user_name": "furin0620",
-            "tweet_length": 867.0,
-            "num_tweets": 21.0,
-            "avg_tweet_len": 41.285714285714285
-          }
-        ]
-      }
-    ]
-  },
-  {
-    "description": "topN, 2 aggs, filtered",
-    "query": {
-      "queryType": "topN",
-      "dataSource": "twitterstream",
-      "intervals": ["2013-01-01T00:00:00.000/2013-01-04T00:00:00.000"],
-      "granularity": "day",
-      "filter": {
-        "type": "or",
-        "fields": [
-          {
-            "type": "selector",
-            "dimension": "user_name",
-            "value": "Favstar_Bot"
-          },
-          {
-            "type": "selector",
-            "dimension": "user_name",
-            "value": "SportsAB"
-          },
-          {
-            "type": "selector",
-            "dimension": "user_name",
-            "value": "furin0620"
-          }
-        ]
-      },
-      "aggregations": [
-        {
-          "type": "doubleSum",
-          "name": "num_tweets",
-          "fieldName": "count"
-        },
-        {
-          "type": "doubleSum",
-          "name": "tweet_length",
-          "fieldName": "tweet_length"
-        }
-      ],
-      "postAggregations": [
-        {
-          "type": "arithmetic",
-          "name": "avg_tweet_len",
-          "fn": "/",
-          "fields": [
-            {
-              "type": "fieldAccess",
-              "name": "tweet_length",
-              "fieldName": "tweet_length"
-            },
-            {
-              "type": "fieldAccess",
-              "name": "num_tweets",
-              "fieldName": "num_tweets"
-            }
-          ]
-        }
-      ],
-      "dimension": "user_name",
-      "metric": {
-        "type": "numeric",
"metric": "num_tweets" - }, - "threshold": 2, - "context": { - "useCache": "true", - "populateCache": "true", - "timeout": 60000, - "useTopNMultiPassPooledQueryGranularity": "true" - } - }, - "expectedResults": [ - { - "timestamp": "2013-01-01T00:00:00.000Z", - "result": [ - { - "user_name": "Favstar_Bot", - "tweet_length": 2002.0, - "num_tweets": 33.0, - "avg_tweet_len": 60.666666666666664 - }, - { - "user_name": "SportsAB", - "tweet_length": 1114.0, - "num_tweets": 26.0, - "avg_tweet_len": 42.84615384615385 - } - ] - }, - { - "timestamp": "2013-01-02T00:00:00.000Z", - "result": [ - { - "user_name": "Favstar_Bot", - "tweet_length": 2185.0, - "num_tweets": 36.0, - "avg_tweet_len": 60.69444444444444 - }, - { - "user_name": "SportsAB", - "tweet_length": 1148.0, - "num_tweets": 23.0, - "avg_tweet_len": 49.91304347826087 - } - ] - }, - { - "timestamp": "2013-01-03T00:00:00.000Z", - "result": [ - { - "user_name": "SportsAB", - "tweet_length": 882.0, - "num_tweets": 22.0, - "avg_tweet_len": 40.09090909090909 - }, - { - "user_name": "furin0620", - "tweet_length": 867.0, - "num_tweets": 21.0, - "avg_tweet_len": 41.285714285714285 - } - ] - } - ] - }, - { - "description": "groupBy", - "query": { - "queryType": "groupBy", - "dataSource": "twitterstream", - "intervals": ["2013-01-01T00:00:00.000/2013-01-04T00:00:00.000"], - "granularity": "day", - "aggregations": [ - { - "type": "doubleSum", - "name": "num_tweets", - "fieldName": "count" - }, - { - "type": "doubleSum", - "name": "tweet_length", - "fieldName": "tweet_length" - } - ], - "dimensions": ["has_links"] - }, - "expectedResults": [ - { - "version": "v1", - "timestamp": "2013-01-01T00:00:00.000Z", - "event": { - "has_links": "No", - "tweet_length": 2.08803904E8, - "num_tweets": 3377791.0 - } - }, - { - "version": "v1", - "timestamp": "2013-01-01T00:00:00.000Z", - "event": { - "has_links": "Yes", - "tweet_length": 3.1437419E7, - "num_tweets": 376237.0 - } - }, - { - "version": "v1", - "timestamp": "2013-01-02T00:00:00.000Z", - "event": { - "has_links": "No", - "tweet_length": 2.10402683E8, - "num_tweets": 3375243.0 - } - }, - { - "version": "v1", - "timestamp": "2013-01-02T00:00:00.000Z", - "event": { - "has_links": "Yes", - "tweet_length": 3.5995118E7, - "num_tweets": 424223.0 - } - }, - { - "version": "v1", - "timestamp": "2013-01-03T00:00:00.000Z", - "event": { - "has_links": "No", - "tweet_length": 1.9645145E8, - "num_tweets": 3144985.0 - } - }, - { - "version": "v1", - "timestamp": "2013-01-03T00:00:00.000Z", - "event": { - "has_links": "Yes", - "tweet_length": 3.4913569E7, - "num_tweets": 407434.0 - } - } - ] - }, - { - "query": { - "queryType": "search", - "intervals": ["2013-01-01T00:00:00.000/2013-01-04T00:00:00.000"], - "dataSource": "twitterstream", - "granularity": "all", - "searchDimensions": ["user_name"], - "sort": { - "type": "lexicographic" - }, - "query": { - "type": "insensitive_contains", - "value": "1World_Sports" - }, - "limit": 3, - "context": { - "useCache": "true", - "populateCache": "true", - "timeout": 60000 - } - }, - "expectedResults": [ - { - "timestamp": "2013-01-01T00:00:00.000Z", - "result": [ - { - "dimension": "user_name", - "value": "1World_Sports", - "count":1 - } - ] - } - ] - }, - { - "description": "groupByArbitraryInterval", - "query": { - "queryType": "groupBy", - "dataSource": "twitterstream", - "intervals": ["2013-01-01T15:10:10.090/2013-01-03T19:30:01.090"], - "granularity": "day", - "aggregations": [ - { - "type": "doubleSum", - "name": "num_tweets", - "fieldName": "count" - }, - { - "type": 
"doubleSum", - "name": "tweet_length", - "fieldName": "tweet_length" - } - ], - "dimensions": ["has_links"] - }, - "expectedResults": [ - { - "version": "v1", - "timestamp": "2013-01-01T00:00:00.000Z", - "event": { - "has_links": "No", - "tweet_length": 7.4820449E7, - "num_tweets": 1170229.0 - } - }, - { - "version": "v1", - "timestamp": "2013-01-01T00:00:00.000Z", - "event": { - "has_links": "Yes", - "tweet_length": 1.149719E7, - "num_tweets": 136582.0 - } - }, - { - "version": "v1", - "timestamp": "2013-01-02T00:00:00.000Z", - "event": { - "has_links": "No", - "tweet_length": 2.10402683E8, - "num_tweets": 3375243.0 - } - }, - { - "version": "v1", - "timestamp": "2013-01-02T00:00:00.000Z", - "event": { - "has_links": "Yes", - "tweet_length": 3.5995118E7, - "num_tweets": 424223.0 - } - }, - { - "version": "v1", - "timestamp": "2013-01-03T00:00:00.000Z", - "event": { - "has_links": "No", - "tweet_length": 1.59141096E8, - "num_tweets": 2567986.0 - } - }, - { - "version": "v1", - "timestamp": "2013-01-03T00:00:00.000Z", - "event": { - "has_links": "Yes", - "tweet_length": 2.8345444E7, - "num_tweets": 328917.0 - } - } - ] - }, - { - "description": "segmentMetadata", - "query": { - "queryType": "segmentMetadata", - "dataSource": "twitterstream", - "intervals": ["2013-01-01T00:00:00.000/2013-01-04T00:00:00.000"], - "toInclude": { - "type": "list", - "columns": ["has_links", "has_links"] - } - }, - "expectedResults": [ - { - "id":"twitterstream_2013-01-01T00:00:00.000Z_2013-01-02T00:00:00.000Z_2013-01-02T04:13:41.980Z_v9", - "intervals":["2013-01-01T00:00:00.000Z/2013-01-02T00:00:00.000Z"], - "columns":{ - "has_links":{ - "typeSignature": "STRING", - "type":"STRING", - "hasMultipleValues":false, - "size":0, - "cardinality":2, - "minValue":"No", - "maxValue":"Yes", - "errorMessage":null, - "hasNulls":false - } - }, - "size":0, - "numRows":3702583, - "aggregators":null, - "projections":null, - "timestampSpec":null, - "queryGranularity":null, - "rollup":null - }, - { - "id":"twitterstream_2013-01-02T00:00:00.000Z_2013-01-03T00:00:00.000Z_2013-01-03T03:44:58.791Z_v9", - "intervals":["2013-01-02T00:00:00.000Z/2013-01-03T00:00:00.000Z"], - "columns":{ - "has_links":{ - "typeSignature": "STRING", - "type":"STRING", - "hasMultipleValues":false, - "size":0, - "cardinality":2, - "minValue":"No", - "maxValue":"Yes", - "errorMessage":null, - "hasNulls":false - } - }, - "size":0, - "numRows":3743002, - "aggregators":null, - "projections":null, - "timestampSpec":null, - "queryGranularity":null, - "rollup":null - }, - { - "id":"twitterstream_2013-01-03T00:00:00.000Z_2013-01-04T00:00:00.000Z_2013-01-04T04:09:13.590Z_v9", - "intervals":["2013-01-03T00:00:00.000Z/2013-01-04T00:00:00.000Z"], - "columns":{ - "has_links":{ - "typeSignature": "STRING", - "type":"STRING", - "hasMultipleValues":false, - "size":0, - "cardinality":2, - "minValue":"No", - "maxValue":"Yes", - "errorMessage":null, - "hasNulls":false - } - }, - "size":0, - "numRows":3502959, - "aggregators":null, - "projections":null, - "timestampSpec":null, - "queryGranularity":null, - "rollup":null - } - ] - }, - { - "description": "topN, 2 aggs, topN over dependent postAgg", - "query": { - "queryType": "topN", - "dataSource": "twitterstream", - "intervals": ["2013-01-01T00:00:00.000/2013-01-04T00:00:00.000"], - "granularity": "day", - "aggregations": [ - { - "type": "doubleSum", - "name": "num_tweets", - "fieldName": "count" - }, - { - "type": "doubleSum", - "name": "tweet_length", - "fieldName": "tweet_length" - } - ], - "postAggregations": [ - { - 
"type": "arithmetic", - "name": "avg_tweet_len", - "fn": "/", - "fields": [ - { - "type": "fieldAccess", - "name": "tweet_length", - "fieldName": "tweet_length" - }, - { - "type": "fieldAccess", - "name": "num_tweets", - "fieldName": "num_tweets" - } - ] - }, - { - "type": "arithmetic", - "name": "avg_tweet_len_half", - "fn": "/", - "fields": [ - { - "type": "fieldAccess", - "name": "avg_tweet_len", - "fieldName": "avg_tweet_len" - }, - { - "type": "constant", - "value": "2" - } - ] - }, - { - "type": "arithmetic", - "name": "avg_tweet_len_doubled", - "fn": "*", - "fields": [ - { - "type": "fieldAccess", - "name": "avg_tweet_len", - "fieldName": "avg_tweet_len" - }, - { - "type": "constant", - "value": "2" - } - ] - } - ], - "dimension": "user_name", - "metric": { - "type": "numeric", - "metric": "avg_tweet_len_doubled" - }, - "threshold": 2, - "context": { - "useCache": "true", - "populateCache": "true", - "timeout": 60000, - "useTopNMultiPassPooledQueryGranularity": "true" - } - }, - "expectedResults": [ - { - "timestamp": "2013-01-01T00:00:00.000Z", - "result": [ - { - "user_name": "___soMALIa___", - "tweet_length": 539.0, - "avg_tweet_len_half": 269.5, - "avg_tweet_len_doubled": 1078.0, - "num_tweets": 1.0, - "avg_tweet_len": 539.0 - }, - { - "user_name": "SophiiiaSlr", - "tweet_length": 530.0, - "avg_tweet_len_half": 265.0, - "avg_tweet_len_doubled": 1060.0, - "num_tweets": 1.0, - "avg_tweet_len": 530.0 - } - ] - }, - { - "timestamp": "2013-01-02T00:00:00.000Z", - "result": [ - { - "user_name": "FallenReckless", - "tweet_length": 518.0, - "avg_tweet_len_half": 259.0, - "avg_tweet_len_doubled": 1036.0, - "num_tweets": 1.0, - "avg_tweet_len": 518.0 - }, - { - "user_name": "SigaMike", - "tweet_length": 514.0, - "avg_tweet_len_half": 257.0, - "avg_tweet_len_doubled": 1028.0, - "num_tweets": 1.0, - "avg_tweet_len": 514.0 - } - ] - }, - { - "timestamp": "2013-01-03T00:00:00.000Z", - "result": [ - { - "user_name": "Alejo_InReverse", - "tweet_length": 560.0, - "avg_tweet_len_half": 280.0, - "avg_tweet_len_doubled": 1120.0, - "num_tweets": 1.0, - "avg_tweet_len": 560.0 - }, - { - "user_name": "GavLeftHome", - "tweet_length": 506.0, - "avg_tweet_len_half": 253.0, - "avg_tweet_len_doubled": 1012.0, - "num_tweets": 1.0, - "avg_tweet_len": 506.0 - } - ] - } - ] - } -] diff --git a/integration-tests/src/test/resources/queries/twitterstream_queries_query_retry_test.json b/integration-tests/src/test/resources/queries/twitterstream_queries_query_retry_test.json deleted file mode 100644 index ffad3ca58175..000000000000 --- a/integration-tests/src/test/resources/queries/twitterstream_queries_query_retry_test.json +++ /dev/null @@ -1,26 +0,0 @@ -[ - { - "description": "timeseries, 1 agg, all", - "query": { - "queryType": "timeseries", - "dataSource": "twitterstream", - "intervals": ["2013-01-01T00:00:00.000/2013-01-08T00:00:00.000"], - "granularity": "all", - "aggregations": [ - { - "type": "count", - "name": "rows" - } - ], - "context": %%CONTEXT%% - }, - "expectedResults": [ - { - "timestamp": "2013-01-01T00:00:00.000Z", - "result": { - "rows": 10948544 - } - } - ] - } -]