diff --git a/.github/workflows/revised-its.yml b/.github/workflows/revised-its.yml
index 148f04f4b7f8..575f28c25acf 100644
--- a/.github/workflows/revised-its.yml
+++ b/.github/workflows/revised-its.yml
@@ -67,7 +67,7 @@ jobs:
       fail-fast: false
       matrix:
         jdk: [17]
-        it: [HighAvailability, MultiStageQuery, Catalog, BatchIndex, MultiStageQueryWithMM, InputSource, InputFormat, Security, Query, DruidExactCountBitmap]
+        it: [HighAvailability, MultiStageQuery, Catalog, BatchIndex, MultiStageQueryWithMM, InputSource, InputFormat, Query, DruidExactCountBitmap]
         indexer: [middleManager]
     uses: ./.github/workflows/reusable-revised-its.yml
     if: ${{ needs.changes.outputs.core == 'true' || needs.changes.outputs.common-extensions == 'true' }}
diff --git a/embedded-tests/pom.xml b/embedded-tests/pom.xml
index a69976e1f9e9..bf8840ecd47b 100644
--- a/embedded-tests/pom.xml
+++ b/embedded-tests/pom.xml
@@ -86,6 +86,12 @@
       <version>${project.parent.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.druid.extensions</groupId>
+      <artifactId>druid-basic-security</artifactId>
+      <version>${project.parent.version}</version>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>joda-time</groupId>
       <artifactId>joda-time</artifactId>
diff --git a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/auth/BasicAuthIndexingTest.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/auth/BasicAuthIndexingTest.java
new file mode 100644
index 000000000000..91b36be7ea09
--- /dev/null
+++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/auth/BasicAuthIndexingTest.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testing.embedded.auth;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import org.apache.druid.rpc.RequestBuilder;
+import org.apache.druid.testing.embedded.EmbeddedDruidCluster;
+import org.apache.druid.testing.embedded.EmbeddedRouter;
+import org.apache.druid.testing.embedded.indexing.IndexTaskTest;
+import org.jboss.netty.handler.codec.http.HttpMethod;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+import java.util.List;
+
+public class BasicAuthIndexingTest extends IndexTaskTest
+{
+  @Override
+  public EmbeddedDruidCluster createCluster()
+  {
+    return EmbeddedDruidCluster
+        .withEmbeddedDerbyAndZookeeper()
+        .addResource(new EmbeddedBasicAuthResource())
+        .useLatchableEmitter()
+        .addServer(coordinator)
+        .addServer(overlord)
+        .addServer(indexer)
+        .addServer(historical)
+        .addServer(broker)
+        .addServer(new EmbeddedRouter())
+        .addCommonProperty("druid.indexer.autoscale.doAutoscale", "true");
+  }
+
+  @Test
+  public void test_getScalingStats_redirectFromCoordinatorToOverlord()
+  {
+    final List response = cluster.callApi().serviceClient().onLeaderCoordinator(
+        mapper -> new RequestBuilder(HttpMethod.GET, "/druid/indexer/v1/scaling"),
+        new TypeReference<>() {}
+    );
+    Assertions.assertNotNull(response);
+    Assertions.assertTrue(response.isEmpty());
+  }
+}
diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/auth/ITSecurityBasicQuery.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/auth/BasicAuthMSQTest.java
similarity index 61%
rename from integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/auth/ITSecurityBasicQuery.java
rename to embedded-tests/src/test/java/org/apache/druid/testing/embedded/auth/BasicAuthMSQTest.java
index 3612faa9eb08..f6da6c1109ef 100644
--- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/auth/ITSecurityBasicQuery.java
+++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/auth/BasicAuthMSQTest.java
@@ -17,69 +17,101 @@
  * under the License.
  */
 
-package org.apache.druid.testsEx.auth;
+package org.apache.druid.testing.embedded.auth;
 
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.inject.Inject;
-import org.apache.druid.common.utils.IdUtils;
+import org.apache.druid.error.ExceptionMatcher;
 import org.apache.druid.java.util.common.StringUtils;
-import org.apache.druid.java.util.http.client.response.StatusResponseHolder;
-import org.apache.druid.msq.indexing.MSQControllerTask;
+import org.apache.druid.metadata.DefaultPasswordProvider;
+import org.apache.druid.msq.guice.IndexerMemoryManagementModule;
+import org.apache.druid.msq.guice.MSQDurableStorageModule;
+import org.apache.druid.msq.guice.MSQExternalDataSourceModule;
+import org.apache.druid.msq.guice.MSQIndexingModule;
+import org.apache.druid.msq.guice.MSQSqlModule;
+import org.apache.druid.msq.guice.SqlTaskModule;
+import org.apache.druid.query.http.ClientSqlQuery;
+import org.apache.druid.query.http.SqlTaskStatus;
+import org.apache.druid.security.basic.authentication.BasicHTTPEscalator;
 import org.apache.druid.server.security.Action;
 import org.apache.druid.server.security.Resource;
 import org.apache.druid.server.security.ResourceAction;
-import org.apache.druid.sql.http.SqlQuery;
 import org.apache.druid.storage.local.LocalFileExportStorageProvider;
 import org.apache.druid.storage.s3.output.S3ExportStorageProvider;
-import org.apache.druid.testing.clients.CoordinatorResourceTestClient;
-import org.apache.druid.testing.clients.OverlordResourceTestClient;
-import org.apache.druid.testing.clients.SecurityClient;
-import org.apache.druid.testing.utils.DataLoaderHelper;
-import org.apache.druid.testing.utils.MsqTestQueryHelper;
-import org.apache.druid.tests.indexer.AbstractIndexerTest;
-import org.apache.druid.testsEx.categories.Security;
-import org.apache.druid.testsEx.config.DruidTestRunner;
-import org.jboss.netty.handler.codec.http.HttpResponseStatus;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-
-import java.io.IOException;
+import org.apache.druid.testing.embedded.EmbeddedBroker;
+import org.apache.druid.testing.embedded.EmbeddedCoordinator;
+import org.apache.druid.testing.embedded.EmbeddedDruidCluster;
+import org.apache.druid.testing.embedded.EmbeddedIndexer;
+import org.apache.druid.testing.embedded.EmbeddedOverlord;
+import org.apache.druid.testing.embedded.EmbeddedServiceClient;
+import org.apache.druid.testing.embedded.indexing.Resources;
+import org.apache.druid.testing.embedded.junit5.EmbeddedClusterTestBase;
+import org.apache.druid.testing.embedded.msq.MSQExportDirectory;
+import org.hamcrest.MatcherAssert;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import java.io.File;
 import java.util.List;
-import java.util.concurrent.ExecutionException;
+import java.util.Map;
 
-@RunWith(DruidTestRunner.class)
-@Category(Security.class)
-public class ITSecurityBasicQuery
+public class BasicAuthMSQTest extends EmbeddedClusterTestBase
 {
-  @Inject
-  private MsqTestQueryHelper msqHelper;
-
-  @Inject
-  private DataLoaderHelper dataLoaderHelper;
-
-  @Inject
-  private CoordinatorResourceTestClient coordinatorClient;
-  @Inject
-  private SecurityClient securityClient;
-  @Inject
-  private OverlordResourceTestClient overlordResourceTestClient;
-
   public static final String USER_1 = "user1";
   public static final String ROLE_1 = "role1";
   public static final String USER_1_PASSWORD = "password1";
-  private static final String EXPORT_TASK = "/indexer/export_task.json";
-  // Time in ms to sleep after updating role permissions in each test. This intends to give the
-  // underlying test cluster enough time to sync permissions and be ready when test execution starts.
-  private static final int SYNC_SLEEP = 10000;
+
+  private SecurityClient securityClient;
+  private EmbeddedServiceClient userClient;
+
+  // Indexer with 2 slots, each with 150MB memory since minimum required memory
+  // computed for the required tests is 133MB
+  private final EmbeddedIndexer indexer = new EmbeddedIndexer()
+      .setServerMemory(300_000_000)
+      .addProperty("druid.worker.capacity", "2");
+  private final EmbeddedOverlord overlord = new EmbeddedOverlord();
+  private final MSQExportDirectory exportDirectory = new MSQExportDirectory();
+
+  @Override
+  protected EmbeddedDruidCluster createCluster()
+  {
+    return EmbeddedDruidCluster
+        .withEmbeddedDerbyAndZookeeper()
+        .useLatchableEmitter()
+        .addResource(exportDirectory)
+        .addResource(new EmbeddedBasicAuthResource())
+        .addServer(new EmbeddedCoordinator())
+        .addServer(overlord)
+        .addServer(indexer)
+        .addServer(new EmbeddedBroker())
+        .addExtensions(
+            MSQSqlModule.class,
+            MSQIndexingModule.class,
+            SqlTaskModule.class,
+            MSQDurableStorageModule.class,
+            MSQExternalDataSourceModule.class,
+            IndexerMemoryManagementModule.class
+        )
+        .addCommonProperty("druid.auth.basic.common.pollingPeriod", "100");
+  }
+
+  @BeforeAll
+  public void setupClient()
+  {
+    // Make a custom client for testing out auth for the test user
+    userClient = EmbeddedServiceClient.create(
+        cluster,
+        new BasicHTTPEscalator("basic", USER_1, new DefaultPasswordProvider(USER_1_PASSWORD))
+    );
+
+    // Use the default set of clients for calling security APIs
+    securityClient = new SecurityClient(cluster.callApi().serviceClient());
+  }
 
-  @Before
-  public void setUp() throws IOException
+  @BeforeEach
+  public void setupRoles()
   {
     // Authentication setup
     securityClient.createAuthenticationUser(USER_1);
@@ -91,8 +123,8 @@ public void setUp() throws IOException
     securityClient.assignUserToRole(USER_1, ROLE_1);
   }
 
-  @After
-  public void tearDown() throws Exception
+  @AfterEach
+  public void tearDownRoles()
   {
     securityClient.deleteAuthenticationUser(USER_1);
     securityClient.deleteAuthorizerUser(USER_1);
@@ -100,14 +132,11 @@ public void tearDown() throws Exception
   }
 
   @Test
-  public void testIngestionWithoutPermissions() throws Exception
+  public void testIngestionWithoutPermissions()
   {
     List<ResourceAction> permissions = ImmutableList.of();
     securityClient.setPermissionsToRole(ROLE_1, permissions);
 
-    // Allow permissions sync across cluster to avoid flakes
-    Thread.sleep(SYNC_SLEEP);
-
     String queryLocal = StringUtils.format(
         "INSERT INTO %s\n"
@@ -138,27 +167,21 @@ public void testIngestionWithoutPermissions() throws Exception
        + " regionIsoCode\n"
        + "FROM TABLE(\n"
        + " EXTERN(\n"
-       + " '{\"type\":\"local\",\"files\":[\"/resources/data/batch_index/json/wikipedia_index_data1.json\"]}',\n"
+       + " '{\"type\":\"local\",\"files\":[\"%s\"]}',\n"
        + " '{\"type\":\"json\"}',\n"
        + " '[{\"type\":\"string\",\"name\":\"timestamp\"},{\"type\":\"string\",\"name\":\"isRobot\"},{\"type\":\"string\",\"name\":\"diffUrl\"},{\"type\":\"long\",\"name\":\"added\"},{\"type\":\"string\",\"name\":\"countryIsoCode\"},{\"type\":\"string\",\"name\":\"regionName\"},{\"type\":\"string\",\"name\":\"channel\"},{\"type\":\"string\",\"name\":\"flags\"},{\"type\":\"long\",\"name\":\"delta\"},{\"type\":\"string\",\"name\":\"isUnpatrolled\"},{\"type\":\"string\",\"name\":\"isNew\"},{\"type\":\"double\",\"name\":\"deltaBucket\"},{\"type\":\"string\",\"name\":\"isMinor\"},{\"type\":\"string\",\"name\":\"isAnonymous\"},{\"type\":\"long\",\"name\":\"deleted\"},{\"type\":\"string\",\"name\":\"cityName\"},{\"type\":\"long\",\"name\":\"metroCode\"},{\"type\":\"string\",\"name\":\"namespace\"},{\"type\":\"string\",\"name\":\"comment\"},{\"type\":\"string\",\"name\":\"page\"},{\"type\":\"long\",\"name\":\"commentLength\"},{\"type\":\"string\",\"name\":\"countryName\"},{\"type\":\"string\",\"name\":\"user\"},{\"type\":\"string\",\"name\":\"regionIsoCode\"}]'\n"
        + " )\n"
        + ")\n"
        + "PARTITIONED BY DAY\n",
-        "dst"
+        dataSource,
+        Resources.DataFile.tinyWiki1Json().getAbsolutePath()
     );
 
-    // Submit the task and wait for the datasource to get loaded
-    StatusResponseHolder statusResponseHolder = msqHelper.submitMsqTask(
-        new SqlQuery(queryLocal, null, false, false, false, ImmutableMap.of(), ImmutableList.of()),
-        USER_1,
-        USER_1_PASSWORD
-    );
-
-    Assert.assertEquals(HttpResponseStatus.FORBIDDEN, statusResponseHolder.getStatus());
+    verifySqlSubmitFailsWith403Forbidden(queryLocal);
   }
 
   @Test
-  public void testIngestionWithPermissions() throws Exception
+  public void testIngestionWithPermissions()
   {
     List<ResourceAction> permissions = ImmutableList.of(
         new ResourceAction(new Resource(".*", "DATASOURCE"), Action.READ),
@@ -168,9 +191,6 @@ public void testIngestionWithPermissions() throws Exception
     );
     securityClient.setPermissionsToRole(ROLE_1, permissions);
 
-    // Allow permissions sync across cluster to avoid flakes
-    Thread.sleep(SYNC_SLEEP);
-
     String queryLocal = StringUtils.format(
         "INSERT INTO %s\n"
@@ -201,27 +221,26 @@ public void testIngestionWithPermissions() throws Exception
        + " regionIsoCode\n"
        + "FROM TABLE(\n"
        + " EXTERN(\n"
-       + " '{\"type\":\"local\",\"files\":[\"/resources/data/batch_index/json/wikipedia_index_data1.json\"]}',\n"
+       + " '{\"type\":\"local\",\"files\":[\"%s\"]}',\n"
        + " '{\"type\":\"json\"}',\n"
        + " '[{\"type\":\"string\",\"name\":\"timestamp\"},{\"type\":\"string\",\"name\":\"isRobot\"},{\"type\":\"string\",\"name\":\"diffUrl\"},{\"type\":\"long\",\"name\":\"added\"},{\"type\":\"string\",\"name\":\"countryIsoCode\"},{\"type\":\"string\",\"name\":\"regionName\"},{\"type\":\"string\",\"name\":\"channel\"},{\"type\":\"string\",\"name\":\"flags\"},{\"type\":\"long\",\"name\":\"delta\"},{\"type\":\"string\",\"name\":\"isUnpatrolled\"},{\"type\":\"string\",\"name\":\"isNew\"},{\"type\":\"double\",\"name\":\"deltaBucket\"},{\"type\":\"string\",\"name\":\"isMinor\"},{\"type\":\"string\",\"name\":\"isAnonymous\"},{\"type\":\"long\",\"name\":\"deleted\"},{\"type\":\"string\",\"name\":\"cityName\"},{\"type\":\"long\",\"name\":\"metroCode\"},{\"type\":\"string\",\"name\":\"namespace\"},{\"type\":\"string\",\"name\":\"comment\"},{\"type\":\"string\",\"name\":\"page\"},{\"type\":\"long\",\"name\":\"commentLength\"},{\"type\":\"string\",\"name\":\"countryName\"},{\"type\":\"string\",\"name\":\"user\"},{\"type\":\"string\",\"name\":\"regionIsoCode\"}]'\n"
        + " )\n"
        + ")\n"
        + "PARTITIONED BY DAY\n",
-        "dst"
+        dataSource,
+        Resources.DataFile.tinyWiki1Json().getAbsolutePath()
     );
 
-    // Submit the task and wait for the datasource to get loaded
-    StatusResponseHolder statusResponseHolder = msqHelper.submitMsqTask(
-        new SqlQuery(queryLocal, null, false, false, false, ImmutableMap.of(), ImmutableList.of()),
-        USER_1,
-        USER_1_PASSWORD
+    final SqlTaskStatus taskStatus = userClient.onAnyBroker(
+        b -> b.submitSqlTask(
+            new ClientSqlQuery(queryLocal, null, false, false, false, Map.of(), List.of())
+        )
     );
-
-    Assert.assertEquals(HttpResponseStatus.ACCEPTED, statusResponseHolder.getStatus());
+    cluster.callApi().waitForTaskToSucceed(taskStatus.getTaskId(), overlord);
   }
 
   @Test
-  public void testExportWithoutPermissions() throws IOException, ExecutionException, InterruptedException
+  public void testExportWithoutPermissions()
   {
     // No external write permissions for s3
     List<ResourceAction> permissions = ImmutableList.of(
@@ -233,9 +252,6 @@ public void testExportWithoutPermissions() throws IOException, ExecutionExceptio
     );
     securityClient.setPermissionsToRole(ROLE_1, permissions);
 
-    // Allow permissions sync across cluster to avoid flakes
-    Thread.sleep(SYNC_SLEEP);
-
     String exportQuery = StringUtils.format(
         "INSERT INTO extern(%s(exportPath => '%s'))\n"
@@ -243,25 +259,21 @@ public void testExportWithoutPermissions() throws IOException, ExecutionExceptio
        + "SELECT page, added, delta\n"
        + "FROM TABLE(\n"
        + " EXTERN(\n"
-       + " '{\"type\":\"local\",\"files\":[\"/resources/data/batch_index/json/wikipedia_index_data1.json\"]}',\n"
+       + " '{\"type\":\"local\",\"files\":[\"%s\"]}',\n"
        + " '{\"type\":\"json\"}',\n"
        + " '[{\"type\":\"string\",\"name\":\"timestamp\"},{\"type\":\"string\",\"name\":\"isRobot\"},{\"type\":\"string\",\"name\":\"diffUrl\"},{\"type\":\"long\",\"name\":\"added\"},{\"type\":\"string\",\"name\":\"countryIsoCode\"},{\"type\":\"string\",\"name\":\"regionName\"},{\"type\":\"string\",\"name\":\"channel\"},{\"type\":\"string\",\"name\":\"flags\"},{\"type\":\"long\",\"name\":\"delta\"},{\"type\":\"string\",\"name\":\"isUnpatrolled\"},{\"type\":\"string\",\"name\":\"isNew\"},{\"type\":\"double\",\"name\":\"deltaBucket\"},{\"type\":\"string\",\"name\":\"isMinor\"},{\"type\":\"string\",\"name\":\"isAnonymous\"},{\"type\":\"long\",\"name\":\"deleted\"},{\"type\":\"string\",\"name\":\"cityName\"},{\"type\":\"long\",\"name\":\"metroCode\"},{\"type\":\"string\",\"name\":\"namespace\"},{\"type\":\"string\",\"name\":\"comment\"},{\"type\":\"string\",\"name\":\"page\"},{\"type\":\"long\",\"name\":\"commentLength\"},{\"type\":\"string\",\"name\":\"countryName\"},{\"type\":\"string\",\"name\":\"user\"},{\"type\":\"string\",\"name\":\"regionIsoCode\"}]'\n"
        + " )\n"
        + ")\n",
-        LocalFileExportStorageProvider.TYPE_NAME, "/shared/export/"
+        LocalFileExportStorageProvider.TYPE_NAME,
+        cluster.getTestFolder().getOrCreateFolder("msq-export").getAbsolutePath(),
+        Resources.DataFile.tinyWiki1Json().getAbsolutePath()
     );
 
-    StatusResponseHolder statusResponseHolder = msqHelper.submitMsqTask(
-        new SqlQuery(exportQuery, null, false, false, false, ImmutableMap.of(), ImmutableList.of()),
-        USER_1,
-        USER_1_PASSWORD
-    );
-
-    Assert.assertEquals(HttpResponseStatus.FORBIDDEN, statusResponseHolder.getStatus());
+    verifySqlSubmitFailsWith403Forbidden(exportQuery);
   }
 
   @Test
-  public void testExportWithPermissions() throws IOException, ExecutionException, InterruptedException
+  public void testExportWithPermissions()
   {
     // No external write permissions for s3
     List<ResourceAction> permissions = ImmutableList.of(
@@ -273,9 +285,6 @@ public void testExportWithPermissions() throws IOException, ExecutionException,
     );
     securityClient.setPermissionsToRole(ROLE_1, permissions);
 
-    // Allow permissions sync across cluster to avoid flakes
-    Thread.sleep(SYNC_SLEEP);
-
     String exportQuery = StringUtils.format(
         "INSERT INTO extern(%s(exportPath => '%s'))\n"
@@ -283,69 +292,36 @@ public void testExportWithPermissions() throws IOException, ExecutionException,
        + "SELECT page, added, delta\n"
        + "FROM TABLE(\n"
        + " EXTERN(\n"
-       + " '{\"type\":\"local\",\"files\":[\"/resources/data/batch_index/json/wikipedia_index_data1.json\"]}',\n"
+       + " '{\"type\":\"local\",\"files\":[\"%s\"]}',\n"
        + " '{\"type\":\"json\"}',\n"
        + " '[{\"type\":\"string\",\"name\":\"timestamp\"},{\"type\":\"string\",\"name\":\"isRobot\"},{\"type\":\"string\",\"name\":\"diffUrl\"},{\"type\":\"long\",\"name\":\"added\"},{\"type\":\"string\",\"name\":\"countryIsoCode\"},{\"type\":\"string\",\"name\":\"regionName\"},{\"type\":\"string\",\"name\":\"channel\"},{\"type\":\"string\",\"name\":\"flags\"},{\"type\":\"long\",\"name\":\"delta\"},{\"type\":\"string\",\"name\":\"isUnpatrolled\"},{\"type\":\"string\",\"name\":\"isNew\"},{\"type\":\"double\",\"name\":\"deltaBucket\"},{\"type\":\"string\",\"name\":\"isMinor\"},{\"type\":\"string\",\"name\":\"isAnonymous\"},{\"type\":\"long\",\"name\":\"deleted\"},{\"type\":\"string\",\"name\":\"cityName\"},{\"type\":\"long\",\"name\":\"metroCode\"},{\"type\":\"string\",\"name\":\"namespace\"},{\"type\":\"string\",\"name\":\"comment\"},{\"type\":\"string\",\"name\":\"page\"},{\"type\":\"long\",\"name\":\"commentLength\"},{\"type\":\"string\",\"name\":\"countryName\"},{\"type\":\"string\",\"name\":\"user\"},{\"type\":\"string\",\"name\":\"regionIsoCode\"}]'\n"
        + " )\n"
        + ")\n",
-        LocalFileExportStorageProvider.TYPE_NAME, "/shared/export/"
+        LocalFileExportStorageProvider.TYPE_NAME,
+        new File(exportDirectory.get(), dataSource).getAbsolutePath(),
+        Resources.DataFile.tinyWiki1Json()
     );
 
-    StatusResponseHolder statusResponseHolder = msqHelper.submitMsqTask(
-        new SqlQuery(exportQuery, null, false, false, false, ImmutableMap.of(), ImmutableList.of()),
-        USER_1,
-        USER_1_PASSWORD
-    );
-
-    Assert.assertEquals(HttpResponseStatus.ACCEPTED, statusResponseHolder.getStatus());
-  }
-
-  @Test
-  public void testExportTaskSubmitOverlordWithPermission() throws Exception
-  {
-    // No external write permissions for s3
-    List<ResourceAction> permissions = ImmutableList.of(
-        new ResourceAction(new Resource(".*", "DATASOURCE"), Action.READ),
-        new ResourceAction(new Resource("EXTERNAL", "EXTERNAL"), Action.READ),
-        new ResourceAction(new Resource(LocalFileExportStorageProvider.TYPE_NAME, "EXTERNAL"), Action.WRITE),
-        new ResourceAction(new Resource("STATE", "STATE"), Action.READ),
-        new ResourceAction(new Resource(".*", "DATASOURCE"), Action.WRITE)
+    final SqlTaskStatus taskStatus = userClient.onAnyBroker(
+        b -> b.submitSqlTask(
+            new ClientSqlQuery(exportQuery, null, false, false, false, Map.of(), List.of())
+        )
     );
-    securityClient.setPermissionsToRole(ROLE_1, permissions);
-
-    // Allow permissions sync across cluster to avoid flakes
-    Thread.sleep(SYNC_SLEEP);
-
-    String task = createTaskString();
-    StatusResponseHolder statusResponseHolder = overlordResourceTestClient.submitTaskAndReturnStatusWithAuth(task, USER_1, USER_1_PASSWORD);
-    Assert.assertEquals(HttpResponseStatus.OK, statusResponseHolder.getStatus());
+    cluster.callApi().waitForTaskToSucceed(taskStatus.getTaskId(), overlord);
   }
 
-  @Test
-  public void testExportTaskSubmitOverlordWithoutPermission() throws Exception
+  private void verifySqlSubmitFailsWith403Forbidden(String sql)
   {
-    // No external write permissions for s3
-    List<ResourceAction> permissions = ImmutableList.of(
-        new ResourceAction(new Resource(".*", "DATASOURCE"), Action.READ),
-        new ResourceAction(new Resource("EXTERNAL", "EXTERNAL"), Action.READ),
-        new ResourceAction(new Resource(S3ExportStorageProvider.TYPE_NAME, "EXTERNAL"), Action.WRITE),
-        new ResourceAction(new Resource("STATE", "STATE"), Action.READ),
-        new ResourceAction(new Resource(".*", "DATASOURCE"), Action.WRITE)
+    MatcherAssert.assertThat(
+        Assertions.assertThrows(
+            Exception.class,
+            () -> userClient.onAnyBroker(
+                b -> b.submitSqlTask(
+                    new ClientSqlQuery(sql, null, false, false, false, Map.of(), List.of())
+                )
+            )
+        ),
+        ExceptionMatcher.of(Exception.class).expectMessageContains("403 Forbidden")
     );
-    securityClient.setPermissionsToRole(ROLE_1, permissions);
-
-    // Allow permissions sync across cluster to avoid flakes
-    Thread.sleep(SYNC_SLEEP);
-
-    String task = createTaskString();
-    StatusResponseHolder statusResponseHolder = overlordResourceTestClient.submitTaskAndReturnStatusWithAuth(task, USER_1, USER_1_PASSWORD);
-    Assert.assertEquals(HttpResponseStatus.FORBIDDEN, statusResponseHolder.getStatus());
-  }
-
-  private String createTaskString() throws Exception
-  {
-    String queryId = IdUtils.newTaskId(MSQControllerTask.TYPE, "external", null);
-    String template = AbstractIndexerTest.getResourceAsString(EXPORT_TASK);
-    return StringUtils.replace(template, "%%QUERY_ID%%", queryId);
-  }
 }
diff --git a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/auth/EmbeddedBasicAuthResource.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/auth/EmbeddedBasicAuthResource.java
new file mode 100644
index 000000000000..3a54954287c3
--- /dev/null
+++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/auth/EmbeddedBasicAuthResource.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testing.embedded.auth;
+
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.security.basic.BasicSecurityDruidModule;
+import org.apache.druid.testing.embedded.EmbeddedDruidCluster;
+import org.apache.druid.testing.embedded.EmbeddedResource;
+
+/**
+ * Resource to enable the basic auth extension in embedded tests.
+ */
+public class EmbeddedBasicAuthResource implements EmbeddedResource
+{
+  public static final String ADMIN_PASSWORD = "priest";
+  public static final String SYSTEM_PASSWORD = "warlock";
+  public static final String SYSTEM_USER = "druid_system";
+
+  private static final String AUTHORIZER_NAME = "basic";
+  private static final String AUTHENTICATOR_NAME = "basic";
+
+  @Override
+  public void start()
+  {
+    // Do nothing
+  }
+
+  @Override
+  public void onStarted(EmbeddedDruidCluster cluster)
+  {
+    cluster
+        .addExtension(BasicSecurityDruidModule.class)
+        .addCommonProperty("druid.auth.authenticatorChain", StringUtils.format("[\"%s\"]", AUTHENTICATOR_NAME))
+        .addCommonProperty(authenticatorProp("type"), "basic")
+        .addCommonProperty(authenticatorProp("initialAdminPassword"), ADMIN_PASSWORD)
+        .addCommonProperty(authenticatorProp("initialInternalClientPassword"), SYSTEM_PASSWORD)
+        .addCommonProperty(authenticatorProp("authorizerName"), AUTHORIZER_NAME)
+        .addCommonProperty("druid.auth.authorizers", StringUtils.format("[\"%s\"]", AUTHORIZER_NAME))
+        .addCommonProperty(authorizerProp("type"), "basic")
+        .addCommonProperty(escalatorProp("type"), "basic")
+        .addCommonProperty(escalatorProp("internalClientPassword"), SYSTEM_PASSWORD)
+        .addCommonProperty(escalatorProp("internalClientUsername"), SYSTEM_USER)
+        .addCommonProperty(escalatorProp("authorizerName"), AUTHORIZER_NAME);
+  }
+
+  @Override
+  public void stop()
+  {
+    // Do nothing
+  }
+
+  private String authenticatorProp(String name)
+  {
+    return StringUtils.format("druid.auth.authenticator.%s.%s", AUTHENTICATOR_NAME, name);
+  }
+
+  private String authorizerProp(String name)
+  {
+    return StringUtils.format("druid.auth.authorizer.%s.%s", AUTHORIZER_NAME, name);
+  }
+
+  private String escalatorProp(String name)
+  {
+    return StringUtils.format("druid.escalator.%s", name);
+  }
+}
diff --git a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/auth/SecurityClient.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/auth/SecurityClient.java
new file mode 100644
index 000000000000..104194f18abc
--- /dev/null
+++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/auth/SecurityClient.java
@@ -0,0 +1,171 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testing.embedded.auth;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.rpc.RequestBuilder;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.testing.embedded.EmbeddedServiceClient;
+import org.jboss.netty.handler.codec.http.HttpMethod;
+
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+
+/**
+ * Client to call various basic auth APIs on the Coordinator.
+ */
+public class SecurityClient
+{
+  private static final String AUTHENTICATOR_URL = "/druid-ext/basic-security/authentication/db/basic";
+  private static final String AUTHORIZER_URL = "/druid-ext/basic-security/authorization/db/basic";
+
+  private final EmbeddedServiceClient clients;
+
+  SecurityClient(EmbeddedServiceClient clients)
+  {
+    this.clients = clients;
+  }
+
+  public void createAuthenticationUser(String username)
+  {
+    final RequestBuilder request = new RequestBuilder(
+        HttpMethod.POST,
+        StringUtils.format(
+            "%s/users/%s",
+            AUTHENTICATOR_URL,
+            StringUtils.urlEncode(username)
+        )
+    );
+    sendRequest(mapper -> request);
+  }
+
+  public void deleteAuthenticationUser(String username)
+  {
+    final RequestBuilder request = new RequestBuilder(
+        HttpMethod.DELETE,
+        StringUtils.format(
+            "%s/users/%s",
+            AUTHENTICATOR_URL,
+            StringUtils.urlEncode(username)
+        )
+    );
+    sendRequest(mapper -> request);
+  }
+
+  public void setUserPassword(String username, String password)
+  {
+    final RequestBuilder request = new RequestBuilder(
+        HttpMethod.POST,
+        StringUtils.format(
+            "%s/users/%s/credentials",
+            AUTHENTICATOR_URL,
+            StringUtils.urlEncode(username)
+        )
+    );
+
+    sendRequest(mapper -> request.jsonContent(mapper, Map.of("password", password)));
+  }
+
+  public void createAuthorizerUser(String username)
+  {
+    final RequestBuilder request = new RequestBuilder(
+        HttpMethod.POST,
+        StringUtils.format(
+            "%s/users/%s",
+            AUTHORIZER_URL,
+            StringUtils.urlEncode(username)
+        )
+    );
+    sendRequest(mapper -> request);
+  }
+
+  public void deleteAuthorizerUser(String username)
+  {
+    final RequestBuilder request = new RequestBuilder(
+        HttpMethod.DELETE,
+        StringUtils.format(
+            "%s/users/%s",
+            AUTHORIZER_URL,
+            StringUtils.urlEncode(username)
+        )
+    );
+    sendRequest(mapper -> request);
+  }
+
+  public void createAuthorizerRole(String role)
+  {
+    final RequestBuilder request = new RequestBuilder(
+        HttpMethod.POST,
+        StringUtils.format(
+            "%s/roles/%s",
+            AUTHORIZER_URL,
+            StringUtils.urlEncode(role)
+        )
+    );
+    sendRequest(mapper -> request);
+  }
+
+  public void deleteAuthorizerRole(String role)
+  {
+    final RequestBuilder request = new RequestBuilder(
+        HttpMethod.DELETE,
+        StringUtils.format(
+            "%s/roles/%s",
+            AUTHORIZER_URL,
+            StringUtils.urlEncode(role)
+        )
+    );
+    sendRequest(mapper -> request);
+  }
+
+  public void assignUserToRole(String user, String role)
+  {
+    final RequestBuilder request = new RequestBuilder(
+        HttpMethod.POST,
+        StringUtils.format(
+            "%s/users/%s/roles/%s",
+            AUTHORIZER_URL,
+            StringUtils.urlEncode(user),
+            StringUtils.urlEncode(role)
+        )
+    );
+    sendRequest(mapper -> request);
+  }
+
+  public void setPermissionsToRole(String role, List<ResourceAction> permissions)
+  {
+    final RequestBuilder request = new RequestBuilder(
+        HttpMethod.POST,
+        StringUtils.format(
+            "%s/roles/%s/permissions/",
+            AUTHORIZER_URL,
+            StringUtils.urlEncode(role)
+        )
+    );
+    sendRequest(mapper -> request.jsonContent(mapper, permissions));
+  }
+
+  private void sendRequest(Function<ObjectMapper, RequestBuilder> request)
+  {
+    clients.onLeaderCoordinator(request, null);
+  }
+}
diff --git a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/AutoCompactionTest.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/AutoCompactionTest.java
index c867305e7221..29991844f03a 100644
--- a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/AutoCompactionTest.java
+++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/AutoCompactionTest.java
@@ -91,6 +91,7 @@
 import org.joda.time.Period;
 import org.joda.time.chrono.ISOChronology;
 import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.params.ParameterizedTest;
@@ -213,13 +214,18 @@ protected EmbeddedDruidCluster createCluster()
         .addServer(new EmbeddedRouter());
   }
 
-  protected final CompactionResourceTestClient compactionResource =
-      new CompactionResourceTestClient(coordinator, overlord);
+  protected CompactionResourceTestClient compactionResource;
 
   private String fullDatasourceName;
 
+  @BeforeAll
+  public void setupClient()
+  {
+    this.compactionResource = new CompactionResourceTestClient(cluster);
+  }
+
   @BeforeEach
-  public void resetCompactionTaskSlots() throws Exception
+  public void resetCompactionTaskSlots()
   {
     // Set compaction slot to 5
     updateCompactionTaskSlot(0.5, 10);
@@ -574,7 +580,6 @@ public void testAutoCompactionDutySubmitAndVerifyCompaction() throws Exception
     verifySegmentIntervals(intervalsBeforeCompaction);
     getAndAssertCompactionStatus(
         fullDatasourceName,
-        AutoCompactionSnapshot.ScheduleStatus.RUNNING,
         Matchers.equalTo(0L),
         Matchers.greaterThan(0L),
         Matchers.greaterThan(0L),
@@ -592,7 +597,6 @@ public void testAutoCompactionDutySubmitAndVerifyCompaction() throws Exception
     verifySegmentIntervals(intervalsBeforeCompaction);
     getAndAssertCompactionStatus(
         fullDatasourceName,
-        AutoCompactionSnapshot.ScheduleStatus.RUNNING,
         Matchers.equalTo(0L),
         Matchers.greaterThan(0L),
         Matchers.equalTo(0L),
@@ -636,7 +640,7 @@ public void testAutoCompactionDutyCanUpdateCompactionConfig(CompactionEngine eng
     LOG.info("Auto compaction test with hash partitioning");
 
     final HashedPartitionsSpec hashedPartitionsSpec = new HashedPartitionsSpec(null, 3, null);
-    submitCompactionConfig(hashedPartitionsSpec, NO_SKIP_OFFSET, 1, null, null, null, null, false, engine);
+    submitCompactionConfig(hashedPartitionsSpec, NO_SKIP_OFFSET, null, null, null, null, false, engine);
 
     // 3 segments for both 2013-08-31 and 2013-09-01. (Note that numShards guarantees max shards but not exact
     // number of final shards, since some shards may end up empty.)
forceTriggerAutoCompaction(6); @@ -663,7 +667,7 @@ public void testAutoCompactionDutyCanUpdateCompactionConfig(CompactionEngine eng false ); } - submitCompactionConfig(inputRangePartitionsSpec, NO_SKIP_OFFSET, 1, null, null, null, null, false, engine); + submitCompactionConfig(inputRangePartitionsSpec, NO_SKIP_OFFSET, null, null, null, null, false, engine); forceTriggerAutoCompaction(2); verifyQuery(INDEX_QUERIES_RESOURCE); verifySegmentsCompacted(expectedRangePartitionsSpec, 2); @@ -725,7 +729,6 @@ public void testAutoCompactionDutyCanUpdateTaskSlots() throws Exception verifySegmentIntervals(intervalsBeforeCompaction); getAndAssertCompactionStatus( fullDatasourceName, - AutoCompactionSnapshot.ScheduleStatus.RUNNING, Matchers.greaterThan(0L), Matchers.greaterThan(0L), Matchers.equalTo(0L), @@ -747,7 +750,6 @@ public void testAutoCompactionDutyCanUpdateTaskSlots() throws Exception verifySegmentIntervals(intervalsBeforeCompaction); getAndAssertCompactionStatus( fullDatasourceName, - AutoCompactionSnapshot.ScheduleStatus.RUNNING, Matchers.equalTo(0L), Matchers.greaterThan(0L), Matchers.equalTo(0L), @@ -1315,7 +1317,7 @@ public void testAutoCompactionDutyWithSegmentGranularityFinerAndNotAlignWithSegm List tasks = getCompleteTasksForDataSource(fullDatasourceName); TaskStatusPlus compactTask = null; for (TaskStatusPlus task : tasks) { - if (task.getType().equals("compact")) { + if ("compact".equals(task.getType())) { compactTask = task; } } @@ -1359,7 +1361,7 @@ public void testAutoCompactionDutyWithSegmentGranularityCoarserAndNotAlignWithSe List tasks = getCompleteTasksForDataSource(fullDatasourceName); TaskStatusPlus compactTask = null; for (TaskStatusPlus task : tasks) { - if (task.getType().equals("compact")) { + if ("compact".equals(task.getType())) { compactTask = task; } } @@ -1665,7 +1667,7 @@ private void verifyScanResult(String field, String result) ); } - private void updateClusterConfig(ClusterCompactionConfig clusterConfig) throws Exception + private void updateClusterConfig(ClusterCompactionConfig clusterConfig) { compactionResource.updateClusterConfig(clusterConfig); LOG.info("Updated cluster config to [%s]", clusterConfig); @@ -1675,7 +1677,7 @@ private void submitCompactionConfig( Integer maxRowsPerSegment, Period skipOffsetFromLatest, CompactionEngine engine - ) throws Exception + ) { submitCompactionConfig(maxRowsPerSegment, skipOffsetFromLatest, null, engine); } @@ -1685,7 +1687,7 @@ private void submitCompactionConfig( Period skipOffsetFromLatest, UserCompactionTaskGranularityConfig granularitySpec, CompactionEngine engine - ) throws Exception + ) { submitCompactionConfig(maxRowsPerSegment, skipOffsetFromLatest, granularitySpec, false, engine); } @@ -1696,7 +1698,7 @@ private void submitCompactionConfig( UserCompactionTaskGranularityConfig granularitySpec, boolean dropExisting, CompactionEngine engine - ) throws Exception + ) { submitCompactionConfig( maxRowsPerSegment, @@ -1719,12 +1721,11 @@ private void submitCompactionConfig( AggregatorFactory[] metricsSpec, boolean dropExisting, CompactionEngine engine - ) throws Exception + ) { submitCompactionConfig( new DynamicPartitionsSpec(maxRowsPerSegment, null), skipOffsetFromLatest, - 1, granularitySpec, dimensionsSpec, transformSpec, @@ -1737,14 +1738,13 @@ private void submitCompactionConfig( private void submitCompactionConfig( PartitionsSpec partitionsSpec, Period skipOffsetFromLatest, - int maxNumConcurrentSubTasks, UserCompactionTaskGranularityConfig granularitySpec, UserCompactionTaskDimensionsConfig dimensionsSpec, 
CompactionTransformSpec transformSpec, AggregatorFactory[] metricsSpec, boolean dropExisting, CompactionEngine engine - ) throws Exception + ) { DataSourceCompactionConfig dataSourceCompactionConfig = InlineSchemaDataSourceCompactionConfig.builder() @@ -1763,7 +1763,7 @@ private void submitCompactionConfig( null, null, null, - maxNumConcurrentSubTasks, + 1, null, null, null, @@ -1794,7 +1794,7 @@ private void submitCompactionConfig( Assertions.assertEquals(foundDataSourceCompactionConfig.getSkipOffsetFromLatest(), skipOffsetFromLatest); } - private void deleteCompactionConfig() throws Exception + private void deleteCompactionConfig() { compactionResource.deleteDataSourceCompactionConfig(fullDatasourceName); @@ -1842,7 +1842,7 @@ private void forceTriggerAutoCompaction( } } - private void forceTriggerAutoCompaction(int numExpectedSegmentsAfterCompaction) throws Exception + private void forceTriggerAutoCompaction(int numExpectedSegmentsAfterCompaction) { compactionResource.forceTriggerAutoCompaction(); waitForCompactionToFinish(numExpectedSegmentsAfterCompaction); @@ -1850,7 +1850,7 @@ private void forceTriggerAutoCompaction(int numExpectedSegmentsAfterCompaction) private void waitForCompactionToFinish(int numExpectedSegmentsAfterCompaction) { - final Set taskIds = getTaskIdsForState(null, dataSource); + final Set taskIds = getTaskIdsForState(dataSource); for (String taskId : taskIds) { cluster.callApi().waitForTaskToSucceed(taskId, overlord); } @@ -1918,7 +1918,7 @@ private void verifySegmentsCompactedDimensionSchema(List dimens } } - private void updateCompactionTaskSlot(double compactionTaskSlotRatio, int maxCompactionTaskSlots) throws Exception + private void updateCompactionTaskSlot(double compactionTaskSlotRatio, int maxCompactionTaskSlots) { final ClusterCompactionConfig oldConfig = compactionResource.getClusterConfig(); compactionResource.updateClusterConfig( @@ -1943,7 +1943,6 @@ private void updateCompactionTaskSlot(double compactionTaskSlotRatio, int maxCom private void getAndAssertCompactionStatus( String fullDatasourceName, - AutoCompactionSnapshot.ScheduleStatus scheduleStatus, Matcher bytesAwaitingCompactionMatcher, Matcher bytesCompactedMatcher, Matcher bytesSkippedMatcher, @@ -1953,11 +1952,11 @@ private void getAndAssertCompactionStatus( long intervalCountAwaitingCompaction, long intervalCountCompacted, long intervalCountSkipped - ) throws Exception + ) { AutoCompactionSnapshot actualStatus = compactionResource.getCompactionStatus(fullDatasourceName); Assertions.assertNotNull(actualStatus); - Assertions.assertEquals(actualStatus.getScheduleStatus(), scheduleStatus); + Assertions.assertEquals(actualStatus.getScheduleStatus(), AutoCompactionSnapshot.ScheduleStatus.RUNNING); MatcherAssert.assertThat(actualStatus.getBytesAwaitingCompaction(), bytesAwaitingCompactionMatcher); MatcherAssert.assertThat(actualStatus.getBytesCompacted(), bytesCompactedMatcher); MatcherAssert.assertThat(actualStatus.getBytesSkipped(), bytesSkippedMatcher); @@ -1983,10 +1982,10 @@ private TaskPayloadResponse getTaskPayload(String taskId) return cluster.callApi().onLeaderOverlord(o -> o.taskPayload(taskId)); } - private Set getTaskIdsForState(String state, String dataSource) + private Set getTaskIdsForState(String dataSource) { return ImmutableList.copyOf( - (CloseableIterator) cluster.callApi().onLeaderOverlord(o -> o.taskStatuses(state, dataSource, 0)) + (CloseableIterator) cluster.callApi().onLeaderOverlord(o -> o.taskStatuses(null, dataSource, 0)) 
).stream().map(TaskStatusPlus::getId).collect(Collectors.toSet()); } } diff --git a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/AutoCompactionUpgradeTest.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/AutoCompactionUpgradeTest.java index 22155c6745e6..708ad28607e2 100644 --- a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/AutoCompactionUpgradeTest.java +++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/AutoCompactionUpgradeTest.java @@ -37,6 +37,7 @@ import org.apache.druid.testing.embedded.junit5.EmbeddedClusterTestBase; import org.joda.time.Period; import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import java.nio.charset.StandardCharsets; @@ -47,8 +48,7 @@ public class AutoCompactionUpgradeTest extends EmbeddedClusterTestBase private final EmbeddedCoordinator coordinator = new EmbeddedCoordinator() .addProperty("druid.manager.segments.useIncrementalCache", "always"); - protected CompactionResourceTestClient compactionResource = - new CompactionResourceTestClient(coordinator, overlord); + protected CompactionResourceTestClient compactionResource; @Override protected EmbeddedDruidCluster createCluster() @@ -57,6 +57,12 @@ protected EmbeddedDruidCluster createCluster() .addServer(coordinator); } + @BeforeAll + public void setupClient() + { + compactionResource = new CompactionResourceTestClient(cluster); + } + @Test public void test_configCanBeUpdated_afterVersionUpgrades() throws Exception { diff --git a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/CompactionResourceTestClient.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/CompactionResourceTestClient.java index aafcf14a2bb9..4e9016b33283 100644 --- a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/CompactionResourceTestClient.java +++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/CompactionResourceTestClient.java @@ -20,32 +20,24 @@ package org.apache.druid.testing.embedded.compact; import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.druid.indexing.overlord.http.CompactionConfigsResponse; -import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.java.util.http.client.HttpClient; -import org.apache.druid.java.util.http.client.Request; -import org.apache.druid.java.util.http.client.response.StatusResponseHandler; -import org.apache.druid.java.util.http.client.response.StatusResponseHolder; -import org.apache.druid.query.aggregation.datasketches.hll.HllSketchModule; -import org.apache.druid.query.aggregation.datasketches.quantiles.DoublesSketchModule; -import org.apache.druid.query.aggregation.datasketches.theta.SketchModule; -import org.apache.druid.segment.TestHelper; +import org.apache.druid.rpc.RequestBuilder; +import org.apache.druid.rpc.indexing.OverlordClient; import org.apache.druid.server.compaction.CompactionSimulateResult; import org.apache.druid.server.compaction.CompactionStatusResponse; import org.apache.druid.server.coordinator.AutoCompactionSnapshot; import org.apache.druid.server.coordinator.ClusterCompactionConfig; import org.apache.druid.server.coordinator.DataSourceCompactionConfig; -import org.apache.druid.testing.embedded.EmbeddedCoordinator; 
-import org.apache.druid.testing.embedded.EmbeddedOverlord; +import org.apache.druid.server.http.ServletResourceUtils; +import org.apache.druid.testing.embedded.EmbeddedDruidCluster; +import org.apache.druid.testing.embedded.EmbeddedServiceClient; import org.jboss.netty.handler.codec.http.HttpMethod; -import org.jboss.netty.handler.codec.http.HttpResponseStatus; -import java.net.URL; import java.util.List; import java.util.Map; +import java.util.Objects; /** * The methods in this class should eventually be updated to use @@ -55,133 +47,72 @@ public class CompactionResourceTestClient { private static final Logger log = new Logger(CompactionResourceTestClient.class); - private final ObjectMapper jsonMapper; - private final EmbeddedOverlord overlord; - private final EmbeddedCoordinator coordinator; - private final StatusResponseHandler responseHandler; + private final EmbeddedServiceClient client; - CompactionResourceTestClient( - EmbeddedCoordinator coordinator, - EmbeddedOverlord overlord - ) + CompactionResourceTestClient(EmbeddedDruidCluster cluster) { - this.jsonMapper = TestHelper.JSON_MAPPER - .registerModules(new SketchModule().getJacksonModules()) - .registerModules(new HllSketchModule().getJacksonModules()) - .registerModules(new DoublesSketchModule().getJacksonModules()); - this.overlord = overlord; - this.coordinator = coordinator; - this.responseHandler = StatusResponseHandler.getInstance(); - } - - private HttpClient httpClient() - { - return overlord.bindings().escalatedHttpClient(); + this.client = cluster.callApi().serviceClient(); } private String getCoordinatorURL() { - return StringUtils.format( - "http://%s:%s/druid/coordinator/v1/", - coordinator.bindings().selfNode().getHost(), - coordinator.bindings().selfNode().getPlaintextPort() - ); + return "/druid/coordinator/v1"; } private String getOverlordURL() { - return StringUtils.format( - "http://%s:%s/druid/indexer/v1", - overlord.bindings().selfNode().getHost(), - overlord.bindings().selfNode().getPlaintextPort() - ); + return "/druid/indexer/v1"; } - public void submitCompactionConfig(final DataSourceCompactionConfig dataSourceCompactionConfig) throws Exception + public void submitCompactionConfig(final DataSourceCompactionConfig dataSourceCompactionConfig) { final String dataSource = dataSourceCompactionConfig.getDataSource(); String url = StringUtils.format( "%s/compaction/config/datasources/%s", getOverlordURL(), StringUtils.urlEncode(dataSource) ); - StatusResponseHolder response = httpClient().go( - new Request(HttpMethod.POST, new URL(url)).setContent( - "application/json", - jsonMapper.writeValueAsBytes(dataSourceCompactionConfig) + client.onLeaderOverlord( + mapper -> new RequestBuilder(HttpMethod.POST, url).jsonContent( + mapper, + dataSourceCompactionConfig ), - responseHandler - ).get(); - - if (!response.getStatus().equals(HttpResponseStatus.OK)) { - throw new ISE( - "Error while submiting compaction config status[%s] content[%s]", - response.getStatus(), - response.getContent() - ); - } - log.info( - "Submitted compaction config for datasource[%s] with response[%s]", - dataSource, response.getContent() + null ); + log.info("Submitted compaction config for datasource[%s]", dataSource); } - public void deleteDataSourceCompactionConfig(final String dataSource) throws Exception + public void deleteDataSourceCompactionConfig(final String dataSource) { String url = StringUtils.format( "%s/compaction/config/datasources/%s", getOverlordURL(), StringUtils.urlEncode(dataSource) ); - StatusResponseHolder response = 
httpClient().go(new Request(HttpMethod.DELETE, new URL(url)), responseHandler).get(); - - if (!response.getStatus().equals(HttpResponseStatus.OK)) { - throw new ISE( - "Error while deleting compaction config status[%s] content[%s]", - response.getStatus(), - response.getContent() - ); - } + client.onLeaderOverlord(mapper -> new RequestBuilder(HttpMethod.DELETE, url), null); } - public List getAllCompactionConfigs() throws Exception + public List getAllCompactionConfigs() { String url = StringUtils.format("%s/compaction/config/datasources", getOverlordURL()); - StatusResponseHolder response = httpClient().go( - new Request(HttpMethod.GET, new URL(url)), responseHandler - ).get(); - if (!response.getStatus().equals(HttpResponseStatus.OK)) { - throw new ISE( - "Error while getting compaction config status[%s] content[%s]", - response.getStatus(), - response.getContent() - ); - } - final CompactionConfigsResponse payload = jsonMapper.readValue( - response.getContent(), + final CompactionConfigsResponse payload = client.onLeaderOverlord( + mapper -> new RequestBuilder(HttpMethod.GET, url), new TypeReference<>() {} ); - return payload.getCompactionConfigs(); + return Objects.requireNonNull(payload).getCompactionConfigs(); } - public DataSourceCompactionConfig getDataSourceCompactionConfig(String dataSource) throws Exception + public DataSourceCompactionConfig getDataSourceCompactionConfig(String dataSource) { String url = StringUtils.format( "%s/compaction/config/datasources/%s", getOverlordURL(), StringUtils.urlEncode(dataSource) ); - StatusResponseHolder response = httpClient().go( - new Request(HttpMethod.GET, new URL(url)), responseHandler - ).get(); - if (!response.getStatus().equals(HttpResponseStatus.OK)) { - throw new ISE( - "Error while getting compaction config status[%s] content[%s]", - response.getStatus(), - response.getContent() - ); - } - return jsonMapper.readValue(response.getContent(), new TypeReference<>() {}); + return client.onLeaderOverlord( + mapper -> new RequestBuilder(HttpMethod.GET, url), + new TypeReference<>() {} + ); } - public void forceTriggerAutoCompaction() throws Exception + public void forceTriggerAutoCompaction() { // Perform a dummy update of task slots to force the coordinator to refresh its compaction config final ClusterCompactionConfig clusterConfig = getClusterConfig(); @@ -199,53 +130,18 @@ public void forceTriggerAutoCompaction() throws Exception simulateResult.getCompactionStates() ); - String url = StringUtils.format("%scompaction/compact", getCoordinatorURL()); - StatusResponseHolder response = httpClient().go(new Request(HttpMethod.POST, new URL(url)), responseHandler).get(); - if (!response.getStatus().equals(HttpResponseStatus.OK)) { - throw new ISE( - "Error while force trigger auto compaction status[%s] content[%s]", - response.getStatus(), - response.getContent() - ); - } + String url = StringUtils.format("%s/compaction/compact", getCoordinatorURL()); + client.onLeaderCoordinator(mapper -> new RequestBuilder(HttpMethod.POST, url), null); } - public void updateClusterConfig(ClusterCompactionConfig config) throws Exception + public void updateClusterConfig(ClusterCompactionConfig config) { - final String url = StringUtils.format( - "%s/compaction/config/cluster", - getOverlordURL() - ); - StatusResponseHolder response = httpClient().go( - new Request(HttpMethod.POST, new URL(url)).setContent( - "application/json", - jsonMapper.writeValueAsBytes(config) - ), - responseHandler - ).get(); - if (!response.getStatus().equals(HttpResponseStatus.OK)) 
{ - throw new ISE( - "Error while updating cluster compaction config, status[%s], content[%s]", - response.getStatus(), - response.getContent() - ); - } + client.onLeaderOverlord(o -> o.updateClusterCompactionConfig(config)); } - public ClusterCompactionConfig getClusterConfig() throws Exception + public ClusterCompactionConfig getClusterConfig() { - String url = StringUtils.format("%s/compaction/config/cluster", getOverlordURL()); - StatusResponseHolder response = httpClient().go( - new Request(HttpMethod.GET, new URL(url)), responseHandler - ).get(); - if (!response.getStatus().equals(HttpResponseStatus.OK)) { - throw new ISE( - "Error while getting compaction config status[%s] content[%s]", - response.getStatus(), - response.getContent() - ); - } - return jsonMapper.readValue(response.getContent(), new TypeReference<>() {}); + return client.onLeaderOverlord(OverlordClient::getClusterCompactionConfig); } /** @@ -253,78 +149,58 @@ public ClusterCompactionConfig getClusterConfig() throws Exception * For all other purposes, use {@link #updateClusterConfig}. */ @Deprecated - private void updateCompactionTaskSlot(Double compactionTaskSlotRatio, Integer maxCompactionTaskSlots) throws Exception + private void updateCompactionTaskSlot(Double compactionTaskSlotRatio, Integer maxCompactionTaskSlots) { final String url = StringUtils.format( - "%sconfig/compaction/taskslots?ratio=%s&max=%s", + "%s/config/compaction/taskslots?ratio=%s&max=%s", getCoordinatorURL(), StringUtils.urlEncode(compactionTaskSlotRatio.toString()), StringUtils.urlEncode(maxCompactionTaskSlots.toString()) ); - StatusResponseHolder response = httpClient().go(new Request(HttpMethod.POST, new URL(url)), responseHandler).get(); - if (!response.getStatus().equals(HttpResponseStatus.OK)) { - throw new ISE( - "Error while updating compaction task slot status[%s] content[%s]", - response.getStatus(), - response.getContent() - ); - } + client.onLeaderCoordinator(mapper -> new RequestBuilder(HttpMethod.POST, url), null); } - public Map getCompactionProgress(String dataSource) throws Exception + public Map getCompactionProgress(String dataSource) { - String url = StringUtils.format("%scompaction/progress?dataSource=%s", getCoordinatorURL(), StringUtils.urlEncode(dataSource)); - StatusResponseHolder response = httpClient().go( - new Request(HttpMethod.GET, new URL(url)), responseHandler - ).get(); - if (!response.getStatus().equals(HttpResponseStatus.OK)) { - throw new ISE( - "Error while getting compaction progress status[%s] content[%s]", - response.getStatus(), - response.getContent() - ); - } - return jsonMapper.readValue(response.getContent(), new TypeReference<>() {}); + String url = StringUtils.format( + "%s/compaction/progress?dataSource=%s", + getCoordinatorURL(), + StringUtils.urlEncode(dataSource) + ); + return client.onLeaderCoordinator( + mapper -> new RequestBuilder(HttpMethod.GET, url), + new TypeReference<>() {} + ); } - public AutoCompactionSnapshot getCompactionStatus(String dataSource) throws Exception + public AutoCompactionSnapshot getCompactionStatus(String dataSource) { - String url = StringUtils.format("%scompaction/status?dataSource=%s", getCoordinatorURL(), StringUtils.urlEncode(dataSource)); - StatusResponseHolder response = httpClient().go( - new Request(HttpMethod.GET, new URL(url)), responseHandler - ).get(); - if (response.getStatus().equals(HttpResponseStatus.NOT_FOUND)) { - return null; - } else if (!response.getStatus().equals(HttpResponseStatus.OK)) { - throw new ISE( - "Error while getting compaction 
status status[%s] content[%s]", - response.getStatus(), - response.getContent() + String url = StringUtils.format( + "%s/compaction/status?dataSource=%s", + getCoordinatorURL(), + StringUtils.urlEncode(dataSource) + ); + + try { + final CompactionStatusResponse latestSnapshots = client.onLeaderCoordinator( + mapper -> new RequestBuilder(HttpMethod.GET, url), + new TypeReference<>() {} ); + return Objects.requireNonNull(latestSnapshots).getLatestStatus().get(0); + } + catch (Exception e) { + return ServletResourceUtils.getDefaultValueIfCauseIs404ElseThrow(e, () -> null); } - final CompactionStatusResponse latestSnapshots = jsonMapper.readValue(response.getContent(), new TypeReference<>() {}); - return latestSnapshots.getLatestStatus().get(0); } - public CompactionSimulateResult simulateRunOnCoordinator() throws Exception + public CompactionSimulateResult simulateRunOnCoordinator() { final ClusterCompactionConfig clusterConfig = getClusterConfig(); - final String url = StringUtils.format("%scompaction/simulate", getCoordinatorURL()); - StatusResponseHolder response = httpClient().go( - new Request(HttpMethod.POST, new URL(url)).setContent( - "application/json", - jsonMapper.writeValueAsBytes(clusterConfig) - ), - responseHandler - ).get(); - if (!response.getStatus().equals(HttpResponseStatus.OK)) { - throw new ISE( - "Error while running simulation on Coordinator: status[%s], content[%s]", - response.getStatus(), response.getContent() - ); - } - - return jsonMapper.readValue(response.getContent(), new TypeReference<>() {}); + final String url = StringUtils.format("%s/compaction/simulate", getCoordinatorURL()); + return client.onLeaderCoordinator( + mapper -> new RequestBuilder(HttpMethod.POST, url).jsonContent(mapper, clusterConfig), + new TypeReference<>() {} + ); } } diff --git a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/CompactionSparseColumnTest.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/CompactionSparseColumnTest.java index f819095b6496..2777d244609d 100644 --- a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/CompactionSparseColumnTest.java +++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/CompactionSparseColumnTest.java @@ -22,7 +22,6 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import org.apache.druid.common.guava.FutureUtils; import org.apache.druid.indexer.partitions.DynamicPartitionsSpec; import org.apache.druid.indexer.partitions.HashedPartitionsSpec; import org.apache.druid.indexing.common.task.TaskBuilder; @@ -238,8 +237,7 @@ private void verifyQueryResult( */ private String getScanEvents(ScanQuery scanQuery) { - final String resultAsJson = - FutureUtils.getUnchecked(cluster.anyBroker().submitNativeQuery(scanQuery), true); + final String resultAsJson = cluster.callApi().onAnyBroker(b -> b.submitNativeQuery(scanQuery)); final List> resultList = JacksonUtils.readValue( TestHelper.JSON_MAPPER, resultAsJson.getBytes(StandardCharsets.UTF_8), diff --git a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/CompactionTaskTest.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/CompactionTaskTest.java index a9ad05ca2b53..99eb428cf9c7 100644 --- a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/CompactionTaskTest.java +++ 
b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/CompactionTaskTest.java @@ -21,7 +21,6 @@ import com.fasterxml.jackson.core.type.TypeReference; import org.apache.druid.client.indexing.ClientCompactionTaskGranularitySpec; -import org.apache.druid.common.guava.FutureUtils; import org.apache.druid.indexer.partitions.HashedPartitionsSpec; import org.apache.druid.indexer.report.IngestionStatsAndErrors; import org.apache.druid.indexer.report.IngestionStatsAndErrorsTaskReport; @@ -321,10 +320,7 @@ private void verifySegmentsHaveQueryGranularity(String expectedQueryGranularity, .intervals("2013-08-31/2013-09-02") .build(); - final String resultAsJson = FutureUtils.getUnchecked( - cluster.anyBroker().submitNativeQuery(query), - true - ); + final String resultAsJson = cluster.callApi().onAnyBroker(b -> b.submitNativeQuery(query)); // Trim the result so that it contains only the `queryGranularity` fields final List> resultList = JacksonUtils.readValue( diff --git a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/indexing/IndexParallelTaskTest.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/indexing/IndexParallelTaskTest.java index e97c9b977473..8127e33027fc 100644 --- a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/indexing/IndexParallelTaskTest.java +++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/indexing/IndexParallelTaskTest.java @@ -100,7 +100,7 @@ public void test_segmentAvailabilityIsConfirmed_whenTaskWaits5secondsForHandoff( .dataSource(dataSource) .timestampColumn("timestamp") .jsonInputFormat() - .localInputSourceWithFiles(Resources.DataFile.TINY_WIKI_1_JSON) + .localInputSourceWithFiles(Resources.DataFile.tinyWiki1Json()) .dimensions() .tuningConfig( t -> t.withAwaitSegmentAvailabilityTimeoutMillis(segmentAvailabilityTimeoutMillis) @@ -135,9 +135,9 @@ public void test_runIndexTask_andReindexIntoAnotherDatasource(PartitionsSpec par .timestampColumn("timestamp") .jsonInputFormat() .localInputSourceWithFiles( - Resources.DataFile.TINY_WIKI_1_JSON, - Resources.DataFile.TINY_WIKI_2_JSON, - Resources.DataFile.TINY_WIKI_3_JSON + Resources.DataFile.tinyWiki1Json(), + Resources.DataFile.tinyWiki2Json(), + Resources.DataFile.tinyWiki3Json() ) .segmentGranularity("DAY") .dimensions("namespace", "page", "language") diff --git a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/indexing/MoreResources.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/indexing/MoreResources.java index 539ec5d0680b..57a18694e10d 100644 --- a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/indexing/MoreResources.java +++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/indexing/MoreResources.java @@ -44,9 +44,9 @@ public static class Task .ofTypeIndex() .jsonInputFormat() .localInputSourceWithFiles( - Resources.DataFile.TINY_WIKI_1_JSON, - Resources.DataFile.TINY_WIKI_2_JSON, - Resources.DataFile.TINY_WIKI_3_JSON + Resources.DataFile.tinyWiki1Json(), + Resources.DataFile.tinyWiki2Json(), + Resources.DataFile.tinyWiki3Json() ) .timestampColumn("timestamp") .dimensions( diff --git a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/msq/BaseRealtimeQueryTest.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/msq/BaseRealtimeQueryTest.java index 42baf6a86623..bce1362254f1 100644 --- a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/msq/BaseRealtimeQueryTest.java +++ 
b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/msq/BaseRealtimeQueryTest.java @@ -134,9 +134,10 @@ void tearDownEach() throws ExecutionException, InterruptedException, IOException Assertions.assertEquals(Map.of("id", dataSource), terminateSupervisorResult); // Cancel all running tasks, so we don't need to wait for them to hand off their segments. - try (final CloseableIterator it = cluster.leaderOverlord().taskStatuses(null, null, null).get()) { + try (final CloseableIterator it + = cluster.callApi().onLeaderOverlord(o -> o.taskStatuses(null, null, null))) { while (it.hasNext()) { - cluster.leaderOverlord().cancelTask(it.next().getId()); + cluster.callApi().onLeaderOverlord(o -> o.cancelTask(it.next().getId())); } } diff --git a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/msq/EmbeddedMSQApis.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/msq/EmbeddedMSQApis.java index 4c002039f4b1..71967443d40c 100644 --- a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/msq/EmbeddedMSQApis.java +++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/msq/EmbeddedMSQApis.java @@ -19,7 +19,6 @@ package org.apache.druid.testing.embedded.msq; -import org.apache.druid.common.guava.FutureUtils; import org.apache.druid.error.DruidException; import org.apache.druid.indexer.TaskState; import org.apache.druid.indexer.report.TaskReport; @@ -62,8 +61,8 @@ public EmbeddedMSQApis(EmbeddedDruidCluster cluster, EmbeddedOverlord overlord) */ public String runDartSql(String sql, Object... args) { - return FutureUtils.getUnchecked( - cluster.anyBroker().submitSqlQuery( + return cluster.callApi().onAnyBroker( + b -> b.submitSqlQuery( new ClientSqlQuery( StringUtils.format(sql, args), ResultFormat.CSV.name(), @@ -73,8 +72,7 @@ public String runDartSql(String sql, Object... args) Map.of(QueryContexts.ENGINE, DartSqlEngine.NAME), null ) - ), - true + ) ).trim(); } @@ -87,8 +85,8 @@ public String runDartSql(String sql, Object... args) public SqlTaskStatus submitTaskSql(String sql, Object... args) { final SqlTaskStatus taskStatus = - FutureUtils.getUnchecked( - cluster.anyBroker().submitSqlTask( + cluster.callApi().onAnyBroker( + b -> b.submitSqlTask( new ClientSqlQuery( StringUtils.format(sql, args), ResultFormat.CSV.name(), @@ -98,8 +96,7 @@ public SqlTaskStatus submitTaskSql(String sql, Object... args) null, null ) - ), - true + ) ); if (taskStatus.getState() != TaskState.RUNNING) { @@ -125,9 +122,8 @@ public MSQTaskReportPayload runTaskSqlAndGetReport(String sql, Object... 
args) cluster.callApi().waitForTaskToSucceed(taskStatus.getTaskId(), overlord); - final TaskReport.ReportMap taskReport = FutureUtils.getUnchecked( - cluster.leaderOverlord().taskReportAsMap(taskStatus.getTaskId()), - true + final TaskReport.ReportMap taskReport = cluster.callApi().onLeaderOverlord( + o -> o.taskReportAsMap(taskStatus.getTaskId()) ); final Optional report = taskReport.findReport(MSQTaskReport.REPORT_KEY); diff --git a/services/src/test/java/org/apache/druid/testing/embedded/ClusterReferencesProvider.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/msq/MSQExportDirectory.java similarity index 52% rename from services/src/test/java/org/apache/druid/testing/embedded/ClusterReferencesProvider.java rename to embedded-tests/src/test/java/org/apache/druid/testing/embedded/msq/MSQExportDirectory.java index 70f96aeb2a75..f3c284ed32bd 100644 --- a/services/src/test/java/org/apache/druid/testing/embedded/ClusterReferencesProvider.java +++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/msq/MSQExportDirectory.java @@ -17,31 +17,41 @@ * under the License. */ -package org.apache.druid.testing.embedded; +package org.apache.druid.testing.embedded.msq; -import org.apache.druid.client.broker.BrokerClient; -import org.apache.druid.client.coordinator.CoordinatorClient; -import org.apache.druid.rpc.indexing.OverlordClient; +import org.apache.druid.testing.embedded.EmbeddedDruidCluster; +import org.apache.druid.testing.embedded.EmbeddedResource; + +import java.io.File; /** - * Provides a handle to the various service clients being used by an - * {@link EmbeddedDruidCluster}. + * Embedded resource to set MSQ export directory. */ -public interface ClusterReferencesProvider +public class MSQExportDirectory implements EmbeddedResource { - /** - * Client to make API calls to the leader Coordinator in the cluster. - */ - CoordinatorClient leaderCoordinator(); + private File directory; + + @Override + public void start() + { + // Do nothing + } - /** - * Client to make API calls to the leader Overlord in the cluster. - */ - OverlordClient leaderOverlord(); + @Override + public void stop() + { + // Do nothing + } - /** - * Client to submit queries to any Broker in the cluster. - */ - BrokerClient anyBroker(); + @Override + public void onStarted(EmbeddedDruidCluster cluster) + { + directory = cluster.getTestFolder().getOrCreateFolder("msq-export"); + cluster.addCommonProperty("druid.export.storage.baseDir", directory.getAbsolutePath()); + } + public File get() + { + return directory; + } } diff --git a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/server/HighAvailabilityTest.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/server/HighAvailabilityTest.java index 58a3a621ea25..e8d7382179d1 100644 --- a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/server/HighAvailabilityTest.java +++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/server/HighAvailabilityTest.java @@ -78,15 +78,18 @@ protected EmbeddedDruidCluster createCluster() .addProperty("druid.plaintextPort", "7081") .addProperty("druid.manager.segments.useIncrementalCache", "always"); + // Keep the Router first in the list to ensure that EmbeddedServiceClient + // does not use clients from the Overlord or Coordinator, which are stopped + // during the test and may cause failures due to ServiceClient being closed. 
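The EmbeddedMSQApis, compaction, and realtime-query hunks above all make the same substitution: instead of unwrapping ListenableFutures with FutureUtils.getUnchecked(...) against cluster.anyBroker() or cluster.leaderOverlord(), tests now go through the blocking helpers on cluster.callApi(). A rough sketch of the resulting call shape, assuming a started EmbeddedDruidCluster in cluster, a ScanQuery in query, and a known task id in taskId (these locals are illustrative, not lifted from any single test above):

    // Submit a native query to any Broker and block until the JSON result is available.
    final String resultAsJson = cluster.callApi().onAnyBroker(b -> b.submitNativeQuery(query));

    // Fetch the report map for a finished task from the leader Overlord, also blocking.
    final TaskReport.ReportMap taskReport =
        cluster.callApi().onLeaderOverlord(o -> o.taskReportAsMap(taskId));
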
return EmbeddedDruidCluster.withEmbeddedDerbyAndZookeeper() .useLatchableEmitter() + .addServer(router) .addServer(overlord1) .addServer(overlord2) .addServer(coordinator1) .addServer(coordinator2) .addServer(indexer) - .addServer(broker) - .addServer(router); + .addServer(broker); } @Test diff --git a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/server/OverlordClientTest.java b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/server/OverlordClientTest.java index 25261553190a..36de4c072a7f 100644 --- a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/server/OverlordClientTest.java +++ b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/server/OverlordClientTest.java @@ -21,10 +21,7 @@ import com.amazonaws.util.Throwables; import com.google.common.collect.ImmutableList; -import com.google.common.util.concurrent.FutureCallback; -import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; -import com.google.common.util.concurrent.MoreExecutors; import org.apache.druid.client.indexing.IndexingTotalWorkerCapacityInfo; import org.apache.druid.client.indexing.IndexingWorkerInfo; import org.apache.druid.common.utils.IdUtils; @@ -58,8 +55,7 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; import java.util.stream.Collectors; /** @@ -114,7 +110,7 @@ public void test_runKillTask() public void test_cancelTask_fails_forUnknownTaskId() { verifyApiFailsWith( - cluster.leaderOverlord().cancelTask(UNKNOWN_TASK_ID), + o -> o.cancelTask(UNKNOWN_TASK_ID), UNKNOWN_TASK_ERROR ); } @@ -182,7 +178,7 @@ public void test_taskStatuses_forCompleteTasks() public void test_taskStatuses_byIds_returnsEmpty_forUnknownTaskIds() { Map result = cluster.callApi().onLeaderOverlord( - o -> o.taskStatuses(Set.of(UNKNOWN_TASK_ID)) + overlord -> overlord.taskStatuses(Set.of(UNKNOWN_TASK_ID)) ); Assertions.assertTrue(result.isEmpty()); } @@ -191,7 +187,7 @@ public void test_taskStatuses_byIds_returnsEmpty_forUnknownTaskIds() public void test_taskStatus_fails_forUnknownTaskId() { verifyApiFailsWith( - cluster.leaderOverlord().taskStatus(UNKNOWN_TASK_ID), + overlord -> overlord.taskStatus(UNKNOWN_TASK_ID), UNKNOWN_TASK_ERROR ); } @@ -200,7 +196,7 @@ public void test_taskStatus_fails_forUnknownTaskId() public void test_taskPayload_fails_forUnknownTaskId() { verifyApiFailsWith( - cluster.leaderOverlord().taskPayload(UNKNOWN_TASK_ID), + overlord -> overlord.taskPayload(UNKNOWN_TASK_ID), UNKNOWN_TASK_ERROR ); } @@ -256,7 +252,7 @@ public void test_postSupervisor_fails_ifRequiredExtensionIsNotLoaded() public void test_findLockedIntervals_fails_whenNoFilter() { verifyApiFailsWith( - cluster.leaderOverlord().findLockedIntervals(List.of()), + o -> o.findLockedIntervals(List.of()), "No filter provided" ); } @@ -354,38 +350,12 @@ public void test_markSegmentAsUnused() Assertions.assertNotNull(result); } - private static void verifyApiFailsWith(ListenableFuture future, String message) + private void verifyApiFailsWith(Function> overlordApi, String message) { - final CountDownLatch isFutureDone = new CountDownLatch(1); - final AtomicReference capturedError = new AtomicReference<>(); - Futures.addCallback( - future, - new FutureCallback<>() - { - @Override - public void onSuccess(T result) - { - isFutureDone.countDown(); - } - - @Override - public void onFailure(Throwable t) - { - 
capturedError.set(t); - isFutureDone.countDown(); - } - }, - MoreExecutors.directExecutor() + Exception exception = Assertions.assertThrows( + Exception.class, + () -> cluster.callApi().onLeaderOverlord(overlordApi) ); - - try { - isFutureDone.await(); - } - catch (InterruptedException e) { - throw new RuntimeException(e); - } - - Assertions.assertNotNull(capturedError.get()); - Assertions.assertTrue(capturedError.get().getMessage().contains(message)); + Assertions.assertTrue(exception.getMessage().contains(message)); } } diff --git a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authentication/db/cache/CoordinatorPollingBasicAuthenticatorCacheManager.java b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authentication/db/cache/CoordinatorPollingBasicAuthenticatorCacheManager.java index 38158ece1eb9..6e2893b39c1d 100644 --- a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authentication/db/cache/CoordinatorPollingBasicAuthenticatorCacheManager.java +++ b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authentication/db/cache/CoordinatorPollingBasicAuthenticatorCacheManager.java @@ -24,7 +24,7 @@ import com.google.inject.Inject; import com.google.inject.Injector; import org.apache.commons.lang3.ArrayUtils; -import org.apache.druid.client.coordinator.CoordinatorServiceClient; +import org.apache.druid.client.coordinator.Coordinator; import org.apache.druid.concurrent.LifecycleLock; import org.apache.druid.guice.ManageLifecycle; import org.apache.druid.guice.annotations.Smile; @@ -40,6 +40,7 @@ import org.apache.druid.java.util.http.client.response.BytesFullResponseHandler; import org.apache.druid.java.util.http.client.response.BytesFullResponseHolder; import org.apache.druid.rpc.RequestBuilder; +import org.apache.druid.rpc.ServiceClient; import org.apache.druid.security.basic.BasicAuthCommonCacheConfig; import org.apache.druid.security.basic.BasicAuthUtils; import org.apache.druid.security.basic.authentication.BasicHTTPAuthenticator; @@ -73,7 +74,7 @@ public class CoordinatorPollingBasicAuthenticatorCacheManager implements BasicAu private final Injector injector; private final ObjectMapper objectMapper; private final LifecycleLock lifecycleLock = new LifecycleLock(); - private final CoordinatorServiceClient coordinatorClient; + private final ServiceClient coordinatorClient; private final BasicAuthCommonCacheConfig commonCacheConfig; private final ScheduledExecutorService exec; @@ -82,7 +83,7 @@ public CoordinatorPollingBasicAuthenticatorCacheManager( Injector injector, BasicAuthCommonCacheConfig commonCacheConfig, @Smile ObjectMapper objectMapper, - CoordinatorServiceClient coordinatorClient + @Coordinator ServiceClient coordinatorClient ) { this.exec = Execs.scheduledSingleThreaded("BasicAuthenticatorCacheManager-Exec--%d"); @@ -257,7 +258,7 @@ private Map tryFetchUserMapFromCoordinator(Strin HttpMethod.GET, StringUtils.format("/druid-ext/basic-security/authentication/db/%s/cachedSerializedUserMap", prefix) ); - BytesFullResponseHolder responseHolder = coordinatorClient.getServiceClient().request( + BytesFullResponseHolder responseHolder = coordinatorClient.request( req, new BytesFullResponseHandler() ); diff --git a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authorization/db/cache/CoordinatorPollingBasicAuthorizerCacheManager.java 
b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authorization/db/cache/CoordinatorPollingBasicAuthorizerCacheManager.java index 51bcf652eafd..91ccb334cd33 100644 --- a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authorization/db/cache/CoordinatorPollingBasicAuthorizerCacheManager.java +++ b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authorization/db/cache/CoordinatorPollingBasicAuthorizerCacheManager.java @@ -23,7 +23,7 @@ import com.google.common.base.Preconditions; import com.google.inject.Inject; import com.google.inject.Injector; -import org.apache.druid.client.coordinator.CoordinatorServiceClient; +import org.apache.druid.client.coordinator.Coordinator; import org.apache.druid.concurrent.LifecycleLock; import org.apache.druid.guice.ManageLifecycle; import org.apache.druid.guice.annotations.Smile; @@ -39,6 +39,7 @@ import org.apache.druid.java.util.http.client.response.BytesFullResponseHandler; import org.apache.druid.java.util.http.client.response.BytesFullResponseHolder; import org.apache.druid.rpc.RequestBuilder; +import org.apache.druid.rpc.ServiceClient; import org.apache.druid.security.basic.BasicAuthCommonCacheConfig; import org.apache.druid.security.basic.BasicAuthUtils; import org.apache.druid.security.basic.authorization.BasicRoleBasedAuthorizer; @@ -78,7 +79,7 @@ public class CoordinatorPollingBasicAuthorizerCacheManager implements BasicAutho private final Injector injector; private final ObjectMapper objectMapper; private final LifecycleLock lifecycleLock = new LifecycleLock(); - private final CoordinatorServiceClient coordinatorClient; + private final ServiceClient coordinatorClient; private final BasicAuthCommonCacheConfig commonCacheConfig; private final ScheduledExecutorService exec; @@ -87,7 +88,7 @@ public CoordinatorPollingBasicAuthorizerCacheManager( Injector injector, BasicAuthCommonCacheConfig commonCacheConfig, @Smile ObjectMapper objectMapper, - CoordinatorServiceClient coordinatorClient + @Coordinator ServiceClient coordinatorClient ) { this.exec = Execs.scheduledSingleThreaded("CoordinatorPollingBasicAuthorizerCacheManager-Exec--%d"); @@ -400,7 +401,7 @@ private UserAndRoleMap tryFetchUserMapsFromCoordinator( HttpMethod.GET, StringUtils.format("/druid-ext/basic-security/authorization/db/%s/cachedSerializedUserMap", prefix) ); - BytesFullResponseHolder responseHolder = coordinatorClient.getServiceClient().request( + BytesFullResponseHolder responseHolder = coordinatorClient.request( req, new BytesFullResponseHandler() ); @@ -424,7 +425,7 @@ private GroupMappingAndRoleMap tryFetchGroupMappingMapsFromCoordinator( HttpMethod.GET, StringUtils.format("/druid-ext/basic-security/authorization/db/%s/cachedSerializedGroupMappingMap", prefix) ); - BytesFullResponseHolder responseHolder = coordinatorClient.getServiceClient().request( + BytesFullResponseHolder responseHolder = coordinatorClient.request( req, new BytesFullResponseHandler() ); diff --git a/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/basic/authentication/db/cache/CoordinatorPollingBasicAuthenticatorCacheManagerTest.java b/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/basic/authentication/db/cache/CoordinatorPollingBasicAuthenticatorCacheManagerTest.java index 3b8fffddf6c4..63fca692623c 100644 --- 
a/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/basic/authentication/db/cache/CoordinatorPollingBasicAuthenticatorCacheManagerTest.java +++ b/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/basic/authentication/db/cache/CoordinatorPollingBasicAuthenticatorCacheManagerTest.java @@ -20,7 +20,6 @@ package org.apache.druid.security.basic.authentication.db.cache; import com.google.inject.Injector; -import org.apache.druid.client.coordinator.CoordinatorServiceClient; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.emitter.EmittingLogger; import org.apache.druid.java.util.metrics.StubServiceEmitter; @@ -63,9 +62,7 @@ public void test_stop_interruptsPollingThread() throws InterruptedException, IOE .andReturn(new AuthenticatorMapper(Map.of("test-basic-auth", authenticator))).once(); // Create a mock leader client and request - final CoordinatorServiceClient leaderClient = EasyMock.createStrictMock(CoordinatorServiceClient.class); final MockServiceClient serviceClient = new MockServiceClient(); - EasyMock.expect(leaderClient.getServiceClient()).andReturn(serviceClient).anyTimes(); // Return the first request immediately final String path = StringUtils.format( @@ -100,14 +97,14 @@ public ChannelBuffer getContent() } ); - EasyMock.replay(injector, leaderClient); + EasyMock.replay(injector); final int numRetries = 10; final CoordinatorPollingBasicAuthenticatorCacheManager manager = new CoordinatorPollingBasicAuthenticatorCacheManager( injector, new BasicAuthCommonCacheConfig(0L, 1L, temporaryFolder.newFolder().getAbsolutePath(), numRetries), TestHelper.JSON_MAPPER, - leaderClient + serviceClient ); // Start the manager and wait for a while to ensure that polling has started @@ -120,7 +117,7 @@ public ChannelBuffer getContent() Assert.assertTrue(isInterrupted.get()); - EasyMock.verify(injector, leaderClient); + EasyMock.verify(injector); } } diff --git a/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/basic/authorization/db/cache/CoordinatorPollingBasicAuthorizerCacheManagerTest.java b/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/basic/authorization/db/cache/CoordinatorPollingBasicAuthorizerCacheManagerTest.java index 3f76d56578ed..5823dc08bb4f 100644 --- a/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/basic/authorization/db/cache/CoordinatorPollingBasicAuthorizerCacheManagerTest.java +++ b/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/basic/authorization/db/cache/CoordinatorPollingBasicAuthorizerCacheManagerTest.java @@ -21,7 +21,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.inject.Injector; -import org.apache.druid.client.coordinator.CoordinatorServiceClient; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.jackson.JacksonUtils; import org.apache.druid.java.util.emitter.EmittingLogger; @@ -64,7 +63,6 @@ public class CoordinatorPollingBasicAuthorizerCacheManagerTest // Mocks private Injector injector; - private CoordinatorServiceClient leaderClient; private MockServiceClient serviceClient; private CoordinatorPollingBasicAuthorizerCacheManager manager; @@ -80,26 +78,24 @@ public void setup() throws IOException .andReturn(new AuthorizerMapper(Map.of(AUTHORIZER_NAME, authorizer))).once(); serviceClient = new MockServiceClient(); - leaderClient = EasyMock.createStrictMock(CoordinatorServiceClient.class); - 
EasyMock.expect(leaderClient.getServiceClient()).andReturn(serviceClient).anyTimes(); final int numRetries = 1; manager = new CoordinatorPollingBasicAuthorizerCacheManager( injector, new BasicAuthCommonCacheConfig(0L, 1L, temporaryFolder.newFolder().getAbsolutePath(), numRetries), MAPPER, - leaderClient + serviceClient ); } private void replayAll() { - EasyMock.replay(injector, leaderClient); + EasyMock.replay(injector); } private void verifyAll() { - EasyMock.verify(injector, leaderClient); + EasyMock.verify(injector); } @Test diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/TaskBuilder.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/TaskBuilder.java index e616471034c0..f6e0e29f7d97 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/TaskBuilder.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/TaskBuilder.java @@ -41,7 +41,6 @@ import org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexSupervisorTask; import org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexTuningConfig; import org.apache.druid.indexing.input.DruidInputSource; -import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.granularity.Granularity; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.segment.TestHelper; @@ -49,8 +48,6 @@ import org.joda.time.Interval; import java.io.File; -import java.net.URL; -import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -156,26 +153,11 @@ public B druidInputSource(String dataSource, Interval interval) * } * */ - public B localInputSourceWithFiles(String... resources) + public B localInputSourceWithFiles(File... files) { - try { - final List files = new ArrayList<>(); - for (String file : resources) { - final URL resourceUrl = getClass().getClassLoader().getResource(file); - if (resourceUrl == null) { - throw new ISE("Could not find file[%s]", file); - } - - files.add(new File(resourceUrl.toURI())); - } - - return inputSource( - new LocalInputSource(null, null, files, null) - ); - } - catch (Exception e) { - throw new RuntimeException(e); - } + return inputSource( + new LocalInputSource(null, null, List.of(files), null) + ); } public B inputFormat(InputFormat inputFormat) diff --git a/integration-tests-ex/cases/cluster/Security/docker-compose.py b/integration-tests-ex/cases/cluster/Security/docker-compose.py deleted file mode 100644 index 9ecb40de19e3..000000000000 --- a/integration-tests-ex/cases/cluster/Security/docker-compose.py +++ /dev/null @@ -1,36 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
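For extension authors, the practical effect of dropping CoordinatorServiceClient (seen in the basic-security diffs above) is that a plain ServiceClient annotated with @Coordinator is injected and used directly. A minimal sketch of that wiring, assuming the standard ServiceClientModule bindings are installed; the class name MyCoordinatorPoller and the request path are hypothetical, while the call shape mirrors the polling cache managers above:

    import com.google.inject.Inject;
    import org.apache.druid.client.coordinator.Coordinator;
    import org.apache.druid.java.util.http.client.response.BytesFullResponseHandler;
    import org.apache.druid.java.util.http.client.response.BytesFullResponseHolder;
    import org.apache.druid.rpc.RequestBuilder;
    import org.apache.druid.rpc.ServiceClient;
    import org.jboss.netty.handler.codec.http.HttpMethod;

    public class MyCoordinatorPoller
    {
      private final ServiceClient coordinatorClient;

      @Inject
      public MyCoordinatorPoller(@Coordinator ServiceClient coordinatorClient)
      {
        this.coordinatorClient = coordinatorClient;
      }

      // Issues a GET against the leader Coordinator through the raw ServiceClient,
      // the same pattern used by the basic-security cache managers above.
      public BytesFullResponseHolder fetch(String path) throws Exception
      {
        return coordinatorClient.request(
            new RequestBuilder(HttpMethod.GET, path),
            new BytesFullResponseHandler()
        );
      }
    }
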
- -from template import BaseTemplate, generate - -class Template(BaseTemplate): - - def define_indexer(self): - service = super().define_indexer() - self.add_property(service, 'druid.msq.intermediate.storage.enable', 'true') - self.add_property(service, 'druid.msq.intermediate.storage.type', 'local') - self.add_property(service, 'druid.msq.intermediate.storage.basePath', '/shared/durablestorage/') - self.add_property(service, 'druid.export.storage.baseDir', '/') - - # No kafka dependency in this cluster - def define_kafka(self): - pass - - def extend_druid_service(self, service): - self.add_env_file(service, '../Common/environment-configs/auth.env') - self.add_env(service, 'druid_test_loadList', 'druid-basic-security') - - -generate(__file__, Template()) diff --git a/integration-tests-ex/cases/pom.xml b/integration-tests-ex/cases/pom.xml index 36ed8b439086..9c493ec2b3e0 100644 --- a/integration-tests-ex/cases/pom.xml +++ b/integration-tests-ex/cases/pom.xml @@ -554,15 +554,6 @@ - - IT-Security - - false - - - Security - - IT-DruidExactCountBitmap diff --git a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/categories/Security.java b/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/categories/Security.java deleted file mode 100644 index 07054e1f4204..000000000000 --- a/integration-tests-ex/cases/src/test/java/org/apache/druid/testsEx/categories/Security.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.apache.druid.testsEx.categories; - -public class Security -{ -} diff --git a/integration-tests-ex/cases/src/test/resources/indexer/export_task.json b/integration-tests-ex/cases/src/test/resources/indexer/export_task.json deleted file mode 100644 index 7ba18e5a98f3..000000000000 --- a/integration-tests-ex/cases/src/test/resources/indexer/export_task.json +++ /dev/null @@ -1,222 +0,0 @@ -{ - "type": "query_controller", - "id": "%%QUERY_ID%%", - "spec": { - "query": { - "queryType": "scan", - "dataSource": { - "type": "external", - "inputSource": { - "type": "local", - "files": [ - "/resources/data/batch_index/json/wikipedia_index_data1.json" - ] - }, - "inputFormat": { - "type": "json", - "keepNullColumns": false, - "assumeNewlineDelimited": false, - "useJsonNodeReader": false - }, - "signature": [ - { - "name": "timestamp", - "type": "STRING" - }, - { - "name": "isRobot", - "type": "STRING" - }, - { - "name": "diffUrl", - "type": "STRING" - }, - { - "name": "added", - "type": "LONG" - }, - { - "name": "countryIsoCode", - "type": "STRING" - }, - { - "name": "regionName", - "type": "STRING" - }, - { - "name": "channel", - "type": "STRING" - }, - { - "name": "flags", - "type": "STRING" - }, - { - "name": "delta", - "type": "LONG" - }, - { - "name": "isUnpatrolled", - "type": "STRING" - }, - { - "name": "isNew", - "type": "STRING" - }, - { - "name": "deltaBucket", - "type": "DOUBLE" - }, - { - "name": "isMinor", - "type": "STRING" - }, - { - "name": "isAnonymous", - "type": "STRING" - }, - { - "name": "deleted", - "type": "LONG" - }, - { - "name": "cityName", - "type": "STRING" - }, - { - "name": "metroCode", - "type": "LONG" - }, - { - "name": "namespace", - "type": "STRING" - }, - { - "name": "comment", - "type": "STRING" - }, - { - "name": "page", - "type": "STRING" - }, - { - "name": "commentLength", - "type": "LONG" - }, - { - "name": "countryName", - "type": "STRING" - }, - { - "name": "user", - "type": "STRING" - }, - { - "name": "regionIsoCode", - "type": "STRING" - } - ] - }, - "intervals": { - "type": "intervals", - "intervals": [ - "-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z" - ] - }, - "resultFormat": "compactedList", - "columns": [ - "added", - "delta", - "page" - ], - "context": { - "__exportFileFormat": "CSV", - "__resultFormat": "array", - "__user": "allowAll", - "executionMode": "async", - "finalize": false, - "finalizeAggregations": false, - "groupByEnableMultiValueUnnesting": false, - "maxNumTasks": 4, - "maxParseExceptions": 0, - "queryId": "b1491ce2-7d2a-4a7a-baa6-25a1a77135e5", - "scanSignature": "[{\"name\":\"added\",\"type\":\"LONG\"},{\"name\":\"delta\",\"type\":\"LONG\"},{\"name\":\"page\",\"type\":\"STRING\"}]", - "sqlQueryId": "b1491ce2-7d2a-4a7a-baa6-25a1a77135e5", - "sqlStringifyArrays": false, - "waitUntilSegmentsLoad": true - }, - "columnTypes": [ - "LONG", - "LONG", - "STRING" - ], - "granularity": { - "type": "all" - } - }, - "columnMappings": [ - { - "queryColumn": "page", - "outputColumn": "page" - }, - { - "queryColumn": "added", - "outputColumn": "added" - }, - { - "queryColumn": "delta", - "outputColumn": "delta" - } - ], - "destination": { - "type": "export", - "exportStorageProvider": { - "type": "local", - "exportPath": "/shared/export/" - }, - "resultFormat": "csv" - }, - "assignmentStrategy": "max", - "tuningConfig": { - "maxNumWorkers": 3, - "maxRowsInMemory": 100000, - "rowsPerSegment": 3000000 - } - }, - "sqlQuery": " INSERT INTO extern(local(exportPath => '/shared/export/'))\n AS CSV\n SELECT page, added, 
delta\n FROM TABLE(\n EXTERN(\n '{\"type\":\"local\",\"files\":[\"/resources/data/batch_index/json/wikipedia_index_data1.json\"]}',\n '{\"type\":\"json\"}',\n '[{\"type\":\"string\",\"name\":\"timestamp\"},{\"type\":\"string\",\"name\":\"isRobot\"},{\"type\":\"string\",\"name\":\"diffUrl\"},{\"type\":\"long\",\"name\":\"added\"},{\"type\":\"string\",\"name\":\"countryIsoCode\"},{\"type\":\"string\",\"name\":\"regionName\"},{\"type\":\"string\",\"name\":\"channel\"},{\"type\":\"string\",\"name\":\"flags\"},{\"type\":\"long\",\"name\":\"delta\"},{\"type\":\"string\",\"name\":\"isUnpatrolled\"},{\"type\":\"string\",\"name\":\"isNew\"},{\"type\":\"double\",\"name\":\"deltaBucket\"},{\"type\":\"string\",\"name\":\"isMinor\"},{\"type\":\"string\",\"name\":\"isAnonymous\"},{\"type\":\"long\",\"name\":\"deleted\"},{\"type\":\"string\",\"name\":\"cityName\"},{\"type\":\"long\",\"name\":\"metroCode\"},{\"type\":\"string\",\"name\":\"namespace\"},{\"type\":\"string\",\"name\":\"comment\"},{\"type\":\"string\",\"name\":\"page\"},{\"type\":\"long\",\"name\":\"commentLength\"},{\"type\":\"string\",\"name\":\"countryName\"},{\"type\":\"string\",\"name\":\"user\"},{\"type\":\"string\",\"name\":\"regionIsoCode\"}]'\n )\n )\n", - "sqlQueryContext": { - "__exportFileFormat": "CSV", - "finalizeAggregations": false, - "sqlQueryId": "b1491ce2-7d2a-4a7a-baa6-25a1a77135e5", - "groupByEnableMultiValueUnnesting": false, - "maxNumTasks": 4, - "waitUntilSegmentsLoad": true, - "executionMode": "async", - "__resultFormat": "array", - "sqlStringifyArrays": false, - "queryId": "b1491ce2-7d2a-4a7a-baa6-25a1a77135e5" - }, - "sqlResultsContext": { - "timeZone": "UTC", - "stringifyArrays": false - }, - "sqlTypeNames": [ - "VARCHAR", - "BIGINT", - "BIGINT" - ], - "nativeTypeNames": [ - "STRING", - "LONG", - "LONG" - ], - "context": { - "forceTimeChunkLock": true - }, - "groupId": "%%QUERY_ID%%", - "dataSource": "__query_export", - "resource": { - "availabilityGroup": "%%QUERY_ID%%", - "requiredCapacity": 1 - } -} diff --git a/integration-tests/src/main/java/org/apache/druid/testing/clients/CoordinatorResourceTestClient.java b/integration-tests/src/main/java/org/apache/druid/testing/clients/CoordinatorResourceTestClient.java index e82580cd312e..d7cfbfee6e6c 100644 --- a/integration-tests/src/main/java/org/apache/druid/testing/clients/CoordinatorResourceTestClient.java +++ b/integration-tests/src/main/java/org/apache/druid/testing/clients/CoordinatorResourceTestClient.java @@ -25,7 +25,6 @@ import com.google.common.net.HostAndPort; import com.google.inject.Inject; import org.apache.druid.java.util.common.ISE; -import org.apache.druid.java.util.common.RE; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.http.client.HttpClient; import org.apache.druid.java.util.http.client.Request; @@ -264,23 +263,6 @@ public void deleteSegmentsDataSource(String dataSource, Interval interval) } } - public HttpResponseStatus getProxiedOverlordScalingResponseStatus() - { - try { - StatusResponseHolder response = makeRequest( - HttpMethod.GET, - StringUtils.format( - "%s/druid/indexer/v1/scaling", - coordinator - ) - ); - return response.getStatus(); - } - catch (Exception e) { - throw new RE(e, "Unable to get scaling status from [%s]", coordinator); - } - } - public Map initializeLookups(String filePath) throws Exception { String url = StringUtils.format("%slookups/config", getCoordinatorURL()); diff --git a/integration-tests/src/main/java/org/apache/druid/testing/clients/SecurityClient.java 
b/integration-tests/src/main/java/org/apache/druid/testing/clients/SecurityClient.java deleted file mode 100644 index 5d8075bd696a..000000000000 --- a/integration-tests/src/main/java/org/apache/druid/testing/clients/SecurityClient.java +++ /dev/null @@ -1,238 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.testing.clients; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableMap; -import com.google.inject.Inject; -import org.apache.druid.java.util.common.ISE; -import org.apache.druid.java.util.common.StringUtils; -import org.apache.druid.java.util.http.client.HttpClient; -import org.apache.druid.java.util.http.client.Request; -import org.apache.druid.java.util.http.client.response.StatusResponseHandler; -import org.apache.druid.java.util.http.client.response.StatusResponseHolder; -import org.apache.druid.server.security.ResourceAction; -import org.apache.druid.testing.IntegrationTestingConfig; -import org.jboss.netty.handler.codec.http.HttpMethod; -import org.jboss.netty.handler.codec.http.HttpResponseStatus; -import org.testng.Assert; - -import javax.ws.rs.core.MediaType; -import java.io.IOException; -import java.net.URL; -import java.util.List; - -public class SecurityClient -{ - private final ObjectMapper jsonMapper; - private final HttpClient httpClient; - private final String coordinator; - private final StatusResponseHandler responseHandler; - - @Inject - SecurityClient( - ObjectMapper jsonMapper, - @AdminClient HttpClient httpClient, - IntegrationTestingConfig config - ) - { - this.jsonMapper = jsonMapper; - this.httpClient = httpClient; - this.coordinator = config.getCoordinatorUrl(); - this.responseHandler = StatusResponseHandler.getInstance(); - } - - public void createAuthenticationUser(String username) throws IOException - { - final Request request = new Request( - HttpMethod.POST, - new URL( - StringUtils.format( - "%s/users/%s", - getAuthenticatorURL(), - StringUtils.urlEncode(username) - ) - ) - ); - Assert.assertEquals(HttpResponseStatus.OK, sendRequest(request).getStatus()); - } - - public void deleteAuthenticationUser(String username) throws IOException - { - final Request request = new Request( - HttpMethod.DELETE, - new URL( - StringUtils.format( - "%s/users/%s", - getAuthenticatorURL(), - StringUtils.urlEncode(username) - ) - ) - ); - Assert.assertEquals(HttpResponseStatus.OK, sendRequest(request).getStatus()); - } - - public void setUserPassword(String username, String password) throws IOException - { - final Request request = new Request( - HttpMethod.POST, - new URL( - StringUtils.format( - "%s/users/%s/credentials", - getAuthenticatorURL(), - StringUtils.urlEncode(username) - ) - ) - ); - - 
request.setContent(MediaType.APPLICATION_JSON, jsonMapper.writeValueAsBytes(ImmutableMap.of("password", password))); - Assert.assertEquals(HttpResponseStatus.OK, sendRequest(request).getStatus()); - } - - public void createAuthorizerUser(String username) throws IOException - { - final Request request = new Request( - HttpMethod.POST, - new URL( - StringUtils.format( - "%s/users/%s", - getAuthorizerURL(), - StringUtils.urlEncode(username) - ) - ) - ); - Assert.assertEquals(HttpResponseStatus.OK, sendRequest(request).getStatus()); - } - - public void deleteAuthorizerUser(String username) throws IOException - { - final Request request = new Request( - HttpMethod.DELETE, - new URL( - StringUtils.format( - "%s/users/%s", - getAuthorizerURL(), - StringUtils.urlEncode(username) - ) - ) - ); - Assert.assertEquals(HttpResponseStatus.OK, sendRequest(request).getStatus()); - } - - public void createAuthorizerRole(String role) throws IOException - { - final Request request = new Request( - HttpMethod.POST, - new URL( - StringUtils.format( - "%s/roles/%s", - getAuthorizerURL(), - StringUtils.urlEncode(role) - ) - ) - ); - Assert.assertEquals(HttpResponseStatus.OK, sendRequest(request).getStatus()); - } - - public void deleteAuthorizerRole(String role) throws IOException - { - final Request request = new Request( - HttpMethod.DELETE, - new URL( - StringUtils.format( - "%s/roles/%s", - getAuthorizerURL(), - StringUtils.urlEncode(role) - ) - ) - ); - Assert.assertEquals(HttpResponseStatus.OK, sendRequest(request).getStatus()); - } - - public void assignUserToRole(String user, String role) throws IOException - { - final Request request = new Request( - HttpMethod.POST, - new URL( - StringUtils.format( - "%s/users/%s/roles/%s", - getAuthorizerURL(), - StringUtils.urlEncode(user), - StringUtils.urlEncode(role) - ) - ) - ); - Assert.assertEquals(HttpResponseStatus.OK, sendRequest(request).getStatus()); - } - - public void setPermissionsToRole(String role, List permissions) throws IOException - { - final Request request = new Request( - HttpMethod.POST, - new URL( - StringUtils.format( - "%s/roles/%s/permissions/", - getAuthorizerURL(), - StringUtils.urlEncode(role) - ) - ) - ).setContent(MediaType.APPLICATION_JSON, jsonMapper.writeValueAsBytes(permissions)); - Assert.assertEquals(HttpResponseStatus.OK, sendRequest(request).getStatus()); - } - - private StatusResponseHolder sendRequest(Request request) - { - try { - final StatusResponseHolder response = httpClient.go( - request, - responseHandler - ).get(); - - if (!response.getStatus().equals(HttpResponseStatus.OK)) { - throw new ISE( - "Error while creating users status [%s] content [%s]", - response.getStatus(), - response.getContent() - ); - } - - return response; - } - catch (Exception e) { - throw new RuntimeException(e); - } - } - - private String getAuthenticatorURL() - { - return StringUtils.format( - "%s/druid-ext/basic-security/authentication/db/basic", - coordinator - ); - } - - private String getAuthorizerURL() - { - return StringUtils.format( - "%s/druid-ext/basic-security/authorization/db/basic", - coordinator - ); - } -} diff --git a/integration-tests/src/test/java/org/apache/druid/tests/security/ITCoordinatorOverlordProxyAuthTest.java b/integration-tests/src/test/java/org/apache/druid/tests/security/ITCoordinatorOverlordProxyAuthTest.java deleted file mode 100644 index f6cc85ab1704..000000000000 --- a/integration-tests/src/test/java/org/apache/druid/tests/security/ITCoordinatorOverlordProxyAuthTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * 
Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.tests.security; - -import com.google.inject.Inject; -import org.apache.druid.testing.clients.CoordinatorResourceTestClient; -import org.apache.druid.testing.guice.DruidTestModuleFactory; -import org.apache.druid.tests.TestNGGroup; -import org.jboss.netty.handler.codec.http.HttpResponseStatus; -import org.testng.Assert; -import org.testng.annotations.Guice; -import org.testng.annotations.Test; - -@Test(groups = TestNGGroup.SECURITY) -@Guice(moduleFactory = DruidTestModuleFactory.class) -public class ITCoordinatorOverlordProxyAuthTest -{ - @Inject - CoordinatorResourceTestClient coordinatorClient; - - @Test - public void testProxyAuth() - { - HttpResponseStatus responseStatus = coordinatorClient.getProxiedOverlordScalingResponseStatus(); - Assert.assertEquals(responseStatus, HttpResponseStatus.OK); - } -} diff --git a/server/src/main/java/org/apache/druid/client/coordinator/CoordinatorServiceClient.java b/server/src/main/java/org/apache/druid/client/coordinator/CoordinatorServiceClient.java deleted file mode 100644 index a50d9b19862a..000000000000 --- a/server/src/main/java/org/apache/druid/client/coordinator/CoordinatorServiceClient.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.apache.druid.client.coordinator; - -import org.apache.druid.discovery.NodeRole; -import org.apache.druid.rpc.ServiceClient; -import org.apache.druid.rpc.ServiceClientFactory; -import org.apache.druid.rpc.ServiceLocator; -import org.apache.druid.rpc.StandardRetryPolicy; - -/** - * Wrapper over {@link ServiceClient} used to send requests to the Coordinator. - *
- * This client should be used to hit Coordinator APIs added by extensions. - * For core Coordinator APIs, use {@link CoordinatorClient} instead. - */ -public class CoordinatorServiceClient -{ - private final ServiceClient serviceClient; - - public CoordinatorServiceClient( - ServiceClientFactory clientFactory, - ServiceLocator coordinatorServiceLocator, - int maxRetryAttempts - ) - { - this.serviceClient = clientFactory.makeClient( - NodeRole.COORDINATOR.getJsonName(), - coordinatorServiceLocator, - StandardRetryPolicy.builder().maxAttempts(maxRetryAttempts).build() - ); - } - - public ServiceClient getServiceClient() - { - return serviceClient; - } -} diff --git a/server/src/main/java/org/apache/druid/rpc/guice/ServiceClientModule.java b/server/src/main/java/org/apache/druid/rpc/guice/ServiceClientModule.java index d99020af708b..203886f99fea 100644 --- a/server/src/main/java/org/apache/druid/rpc/guice/ServiceClientModule.java +++ b/server/src/main/java/org/apache/druid/rpc/guice/ServiceClientModule.java @@ -28,7 +28,6 @@ import org.apache.druid.client.coordinator.Coordinator; import org.apache.druid.client.coordinator.CoordinatorClient; import org.apache.druid.client.coordinator.CoordinatorClientImpl; -import org.apache.druid.client.coordinator.CoordinatorServiceClient; import org.apache.druid.client.indexing.IndexingService; import org.apache.druid.discovery.DruidNodeDiscoveryProvider; import org.apache.druid.discovery.NodeRole; @@ -40,6 +39,7 @@ import org.apache.druid.java.util.common.concurrent.ScheduledExecutors; import org.apache.druid.java.util.http.client.HttpClient; import org.apache.druid.rpc.DiscoveryServiceLocator; +import org.apache.druid.rpc.ServiceClient; import org.apache.druid.rpc.ServiceClientFactory; import org.apache.druid.rpc.ServiceClientFactoryImpl; import org.apache.druid.rpc.ServiceLocator; @@ -77,23 +77,29 @@ public ServiceLocator makeOverlordServiceLocator(final DruidNodeDiscoveryProvide } @Provides - @LazySingleton - public OverlordClient makeOverlordClient( - @Json final ObjectMapper jsonMapper, + @IndexingService + public ServiceClient makeServiceClientForOverlord( @EscalatedGlobal final ServiceClientFactory clientFactory, @IndexingService final ServiceLocator serviceLocator ) { - return new OverlordClientImpl( - clientFactory.makeClient( - NodeRole.OVERLORD.getJsonName(), - serviceLocator, - StandardRetryPolicy.builder().maxAttempts(CLIENT_MAX_ATTEMPTS).build() - ), - jsonMapper + return clientFactory.makeClient( + NodeRole.OVERLORD.getJsonName(), + serviceLocator, + StandardRetryPolicy.builder().maxAttempts(CLIENT_MAX_ATTEMPTS).build() ); } + @Provides + @LazySingleton + public OverlordClient makeOverlordClient( + @Json final ObjectMapper jsonMapper, + @IndexingService final ServiceClient serviceClient + ) + { + return new OverlordClientImpl(serviceClient, jsonMapper); + } + @Provides @ManageLifecycle @Coordinator @@ -103,36 +109,27 @@ public ServiceLocator makeCoordinatorServiceLocator(final DruidNodeDiscoveryProv } @Provides - @LazySingleton - public CoordinatorClient makeCoordinatorClient( - @Json final ObjectMapper jsonMapper, + @Coordinator + public ServiceClient makeServiceClientForCoordinator( @EscalatedGlobal final ServiceClientFactory clientFactory, @Coordinator final ServiceLocator serviceLocator ) { - return new CoordinatorClientImpl( - clientFactory.makeClient( - NodeRole.COORDINATOR.getJsonName(), - serviceLocator, - StandardRetryPolicy.builder().maxAttempts(CLIENT_MAX_ATTEMPTS).build() - ), - jsonMapper + return clientFactory.makeClient( + 
NodeRole.COORDINATOR.getJsonName(), + serviceLocator, + StandardRetryPolicy.builder().maxAttempts(CLIENT_MAX_ATTEMPTS).build() ); } - /** - * Creates a {@link CoordinatorServiceClient} used by extensions to send - * requests to the Coordinator. For core Coordinator APIs, - * {@link CoordinatorClient} should be used instead. - */ @Provides @LazySingleton - public static CoordinatorServiceClient createCoordinatorServiceClient( - @EscalatedGlobal ServiceClientFactory clientFactory, - @Coordinator ServiceLocator serviceLocator + public CoordinatorClient makeCoordinatorClient( + @Json final ObjectMapper jsonMapper, + @Coordinator final ServiceClient serviceClient ) { - return new CoordinatorServiceClient(clientFactory, serviceLocator, CLIENT_MAX_ATTEMPTS); + return new CoordinatorClientImpl(serviceClient, jsonMapper); } @Provides @@ -144,23 +141,29 @@ public ServiceLocator makeBrokerServiceLocator(final DruidNodeDiscoveryProvider } @Provides - @LazySingleton - public BrokerClient makeBrokerClient( - @Json final ObjectMapper jsonMapper, + @Broker + public ServiceClient makeServiceClientForBroker( @EscalatedGlobal final ServiceClientFactory clientFactory, @Broker final ServiceLocator serviceLocator ) { - return new BrokerClientImpl( - clientFactory.makeClient( - NodeRole.BROKER.getJsonName(), - serviceLocator, - StandardRetryPolicy.builder().maxAttempts(ServiceClientModule.CLIENT_MAX_ATTEMPTS).build() - ), - jsonMapper + return clientFactory.makeClient( + NodeRole.BROKER.getJsonName(), + serviceLocator, + StandardRetryPolicy.builder().maxAttempts(ServiceClientModule.CLIENT_MAX_ATTEMPTS).build() ); } + @Provides + @LazySingleton + public BrokerClient makeBrokerClient( + @Json final ObjectMapper jsonMapper, + @Broker final ServiceClient serviceClient + ) + { + return new BrokerClientImpl(serviceClient, jsonMapper); + } + public static ServiceClientFactory makeServiceClientFactory(@EscalatedGlobal final HttpClient httpClient) { final ScheduledExecutorService connectExec = diff --git a/server/src/test/java/org/apache/druid/rpc/guice/ServiceClientModuleTest.java b/server/src/test/java/org/apache/druid/rpc/guice/ServiceClientModuleTest.java index a084a1e53139..f039a023ad9f 100644 --- a/server/src/test/java/org/apache/druid/rpc/guice/ServiceClientModuleTest.java +++ b/server/src/test/java/org/apache/druid/rpc/guice/ServiceClientModuleTest.java @@ -22,15 +22,19 @@ import com.google.common.collect.ImmutableList; import com.google.inject.Guice; import com.google.inject.Injector; +import com.google.inject.Key; +import org.apache.druid.client.broker.Broker; import org.apache.druid.client.broker.BrokerClient; +import org.apache.druid.client.coordinator.Coordinator; import org.apache.druid.client.coordinator.CoordinatorClient; -import org.apache.druid.client.coordinator.CoordinatorServiceClient; +import org.apache.druid.client.indexing.IndexingService; import org.apache.druid.discovery.DruidNodeDiscoveryProvider; import org.apache.druid.guice.DruidGuiceExtensions; import org.apache.druid.guice.LifecycleModule; import org.apache.druid.guice.annotations.EscalatedGlobal; import org.apache.druid.jackson.JacksonModule; import org.apache.druid.java.util.http.client.HttpClient; +import org.apache.druid.rpc.ServiceClient; import org.apache.druid.rpc.ServiceClientFactory; import org.apache.druid.rpc.ServiceLocator; import org.apache.druid.rpc.indexing.OverlordClient; @@ -108,6 +112,18 @@ public void testGetBrokerClient() @Test public void testGetCoordinatorServiceClient() { - 
assertNotNull(injector.getInstance(CoordinatorServiceClient.class)); + assertNotNull(injector.getInstance(Key.get(ServiceClient.class, Coordinator.class))); + } + + @Test + public void testGetOverlordServiceClient() + { + assertNotNull(injector.getInstance(Key.get(ServiceClient.class, IndexingService.class))); + } + + @Test + public void testGetBrokerServiceClient() + { + assertNotNull(injector.getInstance(Key.get(ServiceClient.class, Broker.class))); } } diff --git a/services/src/test/java/org/apache/druid/testing/embedded/EmbeddedClusterApis.java b/services/src/test/java/org/apache/druid/testing/embedded/EmbeddedClusterApis.java index 954e0f117c43..878cc055ffdb 100644 --- a/services/src/test/java/org/apache/druid/testing/embedded/EmbeddedClusterApis.java +++ b/services/src/test/java/org/apache/druid/testing/embedded/EmbeddedClusterApis.java @@ -22,9 +22,9 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.collect.ImmutableList; import com.google.common.util.concurrent.ListenableFuture; +import org.apache.druid.client.broker.BrokerClient; import org.apache.druid.client.coordinator.CoordinatorClient; import org.apache.druid.client.indexing.TaskStatusResponse; -import org.apache.druid.common.guava.FutureUtils; import org.apache.druid.common.utils.IdUtils; import org.apache.druid.indexer.TaskState; import org.apache.druid.indexer.TaskStatus; @@ -66,28 +66,60 @@ * @see #onLeaderOverlord(Function) * @see #runSql(String, Object...) */ -public class EmbeddedClusterApis +public class EmbeddedClusterApis implements EmbeddedResource { private final EmbeddedDruidCluster cluster; + private EmbeddedServiceClient client; EmbeddedClusterApis(EmbeddedDruidCluster cluster) { this.cluster = cluster; } + @Override + public void start() throws Exception + { + this.client = EmbeddedServiceClient.create(cluster, null); + } + + @Override + public void stop() throws Exception + { + if (client != null) { + client.stop(); + client = null; + } + } + + /** + * Client used for all the API calls made by this {@link EmbeddedClusterApis}. + */ + public EmbeddedServiceClient serviceClient() + { + return Objects.requireNonNull( + client, + "Service clients are not initialized. Ensure that the cluster has started properly." + ); + } + public T onLeaderCoordinator(Function> coordinatorApi) { - return getResult(coordinatorApi.apply(cluster.leaderCoordinator())); + return client.onLeaderCoordinator(coordinatorApi); } public T onLeaderCoordinatorSync(Function coordinatorApi) { - return coordinatorApi.apply(cluster.leaderCoordinator()); + return client.onLeaderCoordinatorSync(coordinatorApi); } public T onLeaderOverlord(Function> overlordApi) { - return getResult(overlordApi.apply(cluster.leaderOverlord())); + return client.onLeaderOverlord(overlordApi); + } + + public T onAnyBroker(Function> brokerApi) + { + return client.onAnyBroker(brokerApi); } /** @@ -99,8 +131,8 @@ public T onLeaderOverlord(Function> over public String runSql(String sql, Object... 
args) { try { - return getResult( - cluster.anyBroker().submitSqlQuery( + return onAnyBroker( + b -> b.submitSqlQuery( new ClientSqlQuery( StringUtils.format(sql, args), ResultFormat.CSV.name(), @@ -162,9 +194,11 @@ public void submitTask(Task task) */ public void waitForTaskToSucceed(String taskId, EmbeddedOverlord overlord) { + TaskStatus taskStatus = waitForTaskToFinish(taskId, overlord); Assertions.assertEquals( TaskState.SUCCESS, - waitForTaskToFinish(taskId, overlord).getStatusCode() + taskStatus.getStatusCode(), + StringUtils.format("Task[%s] failed with error[%s]", taskId, taskStatus.getErrorMsg()) ); } @@ -380,11 +414,6 @@ public static List createAlignedIntervals( return alignedIntervals; } - private static T getResult(ListenableFuture future) - { - return FutureUtils.getUnchecked(future, true); - } - @FunctionalInterface public interface TaskBuilder { diff --git a/services/src/test/java/org/apache/druid/testing/embedded/EmbeddedDruidCluster.java b/services/src/test/java/org/apache/druid/testing/embedded/EmbeddedDruidCluster.java index 3fb2cba18bc5..dabceaff4b98 100644 --- a/services/src/test/java/org/apache/druid/testing/embedded/EmbeddedDruidCluster.java +++ b/services/src/test/java/org/apache/druid/testing/embedded/EmbeddedDruidCluster.java @@ -21,13 +21,10 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Lists; -import org.apache.druid.client.broker.BrokerClient; -import org.apache.druid.client.coordinator.CoordinatorClient; import org.apache.druid.initialization.DruidModule; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.logger.Logger; -import org.apache.druid.rpc.indexing.OverlordClient; import org.apache.druid.server.metrics.LatchableEmitter; import org.apache.druid.testing.embedded.derby.InMemoryDerbyModule; import org.apache.druid.testing.embedded.derby.InMemoryDerbyResource; @@ -71,7 +68,7 @@ * cluster.stop(); * */ -public class EmbeddedDruidCluster implements ClusterReferencesProvider, EmbeddedResource +public class EmbeddedDruidCluster implements EmbeddedResource { private static final Logger log = new Logger(EmbeddedDruidCluster.class); @@ -250,6 +247,12 @@ public void start() throws Exception { Preconditions.checkArgument(!servers.isEmpty(), "Cluster must have at least one embedded Druid server"); + // Add clusterApis as the last entry in the resources list, so that the + // EmbeddedServiceClient is initialized after mappers have been injected into the servers + if (!startedFirstDruidServer) { + resources.add(clusterApis); + } + // Start the resources in order for (EmbeddedResource resource : resources) { try { @@ -314,25 +317,12 @@ public String runSql(String sql, Object... 
diff --git a/services/src/test/java/org/apache/druid/testing/embedded/EmbeddedServiceClient.java b/services/src/test/java/org/apache/druid/testing/embedded/EmbeddedServiceClient.java
new file mode 100644
index 000000000000..cf7322b1b8ff
--- /dev/null
+++ b/services/src/test/java/org/apache/druid/testing/embedded/EmbeddedServiceClient.java
@@ -0,0 +1,225 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testing.embedded;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.base.Throwables;
+import com.google.common.util.concurrent.ListenableFuture;
+import org.apache.druid.client.broker.Broker;
+import org.apache.druid.client.broker.BrokerClient;
+import org.apache.druid.client.coordinator.Coordinator;
+import org.apache.druid.client.coordinator.CoordinatorClient;
+import org.apache.druid.client.indexing.IndexingService;
+import org.apache.druid.common.guava.FutureUtils;
+import org.apache.druid.java.util.common.ISE;
+import org.apache.druid.java.util.common.concurrent.ScheduledExecutors;
+import org.apache.druid.java.util.http.client.HttpClient;
+import org.apache.druid.java.util.http.client.response.StatusResponseHandler;
+import org.apache.druid.java.util.http.client.response.StatusResponseHolder;
+import org.apache.druid.rpc.RequestBuilder;
+import org.apache.druid.rpc.ServiceClient;
+import org.apache.druid.rpc.ServiceClientFactory;
+import org.apache.druid.rpc.ServiceClientFactoryImpl;
+import org.apache.druid.rpc.ServiceLocator;
+import org.apache.druid.rpc.guice.ServiceClientModule;
+import org.apache.druid.rpc.indexing.OverlordClient;
+import org.apache.druid.server.security.Escalator;
+import org.jboss.netty.handler.codec.http.HttpResponseStatus;
+
+import javax.annotation.Nullable;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Function;
+
+/**
+ * Client to make requests to various services in an embedded test cluster.
+ *
+ * @see #onLeaderOverlord(Function)
+ * @see #onLeaderCoordinator(Function)
+ * @see #onAnyBroker(Function)
+ */
+public class EmbeddedServiceClient
+{
+  private final EmbeddedDruidCluster cluster;
+  private final ServiceClientModule module;
+
+  private final ServiceClient coordinatorServiceClient;
+  private final ServiceClient overlordServiceClient;
+  private final ServiceClient brokerServiceClient;
+
+  private final ScheduledExecutorService clientConnectExec;
+  private final StatusResponseHandler responseHandler;
+
+  private EmbeddedServiceClient(EmbeddedDruidCluster cluster, Escalator escalator)
+  {
+    // Use a ServiceClientModule to create the various clients
+    this.module = new ServiceClientModule();
+    this.cluster = cluster;
+    this.clientConnectExec = ScheduledExecutors.fixed(4, "ServiceClientFactory-%d");
+    this.responseHandler = StatusResponseHandler.getInstance();
+
+    // If this server is stopped, the client becomes invalid
+    final EmbeddedDruidServer<?> anyServer = cluster.anyServer();
+
+    final HttpClient escalatedHttpClient =
+        escalator == null
+        ? anyServer.bindings().escalatedHttpClient()
+        : escalator.createEscalatedClient(anyServer.bindings().globalHttpClient());
+
+    // Create service clients
+    final ServiceClientFactory factory = new ServiceClientFactoryImpl(escalatedHttpClient, clientConnectExec);
+    this.overlordServiceClient = module.makeServiceClientForOverlord(
+        factory,
+        anyServer.bindings().getInstance(ServiceLocator.class, IndexingService.class)
+    );
+
+    this.brokerServiceClient = module.makeServiceClientForBroker(
+        factory,
+        anyServer.bindings().getInstance(ServiceLocator.class, Broker.class)
+    );
+    this.coordinatorServiceClient = module.makeServiceClientForCoordinator(
+        factory,
+        anyServer.bindings().getInstance(ServiceLocator.class, Coordinator.class)
+    );
+  }
+
+  /**
+   * Creates a client that uses the {@link Escalator} bound to the embedded servers.
+   */
+  public static EmbeddedServiceClient create(EmbeddedDruidCluster cluster)
+  {
+    return new EmbeddedServiceClient(cluster, null);
+  }
+
+  /**
+   * Creates a client using the specified {@link Escalator}. All requests made by this
+   * client will use the given escalator.
+   */
+  public static EmbeddedServiceClient create(EmbeddedDruidCluster cluster, Escalator escalator)
+  {
+    return new EmbeddedServiceClient(cluster, escalator);
+  }
+
+  /**
+   * Stops the executor service used by this client.
+   */
+  public void stop() throws InterruptedException
+  {
+    clientConnectExec.shutdownNow();
+    clientConnectExec.awaitTermination(1, TimeUnit.MINUTES);
+  }
+
+  @Nullable
+  public <T> T onLeaderCoordinator(
+      Function<ObjectMapper, RequestBuilder> request,
+      TypeReference<T> resultType
+  )
+  {
+    return makeRequest(request, resultType, coordinatorServiceClient, getMapper(EmbeddedCoordinator.class));
+  }
+
+  public <T> T onLeaderCoordinator(Function<CoordinatorClient, ListenableFuture<T>> coordinatorApi)
+  {
+    return getResult(coordinatorApi.apply(createCoordinatorClient()));
+  }
+
+  public <T> T onLeaderCoordinatorSync(Function<CoordinatorClient, T> coordinatorApi)
+  {
+    return coordinatorApi.apply(createCoordinatorClient());
+  }
+
+  public <T> T onLeaderOverlord(Function<OverlordClient, ListenableFuture<T>> overlordApi)
+  {
+    return getResult(overlordApi.apply(createOverlordClient()));
+  }
+
+  @Nullable
+  public <T> T onLeaderOverlord(
+      Function<ObjectMapper, RequestBuilder> request,
+      TypeReference<T> resultType
+  )
+  {
+    return makeRequest(request, resultType, overlordServiceClient, getMapper(EmbeddedOverlord.class));
+  }
+
+  public <T> T onAnyBroker(Function<BrokerClient, ListenableFuture<T>> brokerApi)
+  {
+    return getResult(brokerApi.apply(createBrokerClient()));
+  }
+
+  @Nullable
+  private <T> T makeRequest(
+      Function<ObjectMapper, RequestBuilder> request,
+      TypeReference<T> resultType,
+      ServiceClient serviceClient,
+      ObjectMapper mapper
+  )
+  {
+    final RequestBuilder requestBuilder = request.apply(mapper);
+
+    try {
+      StatusResponseHolder response = serviceClient.request(requestBuilder, responseHandler);
+      if (!response.getStatus().equals(HttpResponseStatus.OK)) {
+        throw new ISE(
+            "Request[%s] failed with status[%s] content[%s].",
+            requestBuilder.toString(),
+            response.getStatus(),
+            response.getContent()
+        );
+      }
+
+      if (resultType == null) {
+        return null;
+      } else {
+        return mapper.readValue(response.getContent(), resultType);
+      }
+    }
+    catch (Exception e) {
+      Throwables.throwIfUnchecked(e);
+      throw new RuntimeException(e);
+    }
+  }
+
+  private CoordinatorClient createCoordinatorClient()
+  {
+    return module.makeCoordinatorClient(getMapper(EmbeddedCoordinator.class), coordinatorServiceClient);
+  }
+
+  private OverlordClient createOverlordClient()
+  {
+    return module.makeOverlordClient(getMapper(EmbeddedOverlord.class), overlordServiceClient);
+  }
+
+  private BrokerClient createBrokerClient()
+  {
+    return module.makeBrokerClient(getMapper(EmbeddedBroker.class), brokerServiceClient);
+  }
+
+  private <S extends EmbeddedDruidServer<S>> ObjectMapper getMapper(Class<S> serverType)
+  {
+    return cluster.findServerOfType(serverType).bindings().jsonMapper();
+  }
+
+  private static <T> T getResult(ListenableFuture<T> future)
+  {
+    return FutureUtils.getUnchecked(future, true);
+  }
+}
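EmbeddedServiceClient supports two call styles: typed lambdas against CoordinatorClient/OverlordClient/BrokerClient, and raw requests built from an ObjectMapper plus a TypeReference. A hedged sketch of the raw style follows; the endpoint and response type are placeholders, and most tests would go through cluster.callApi().serviceClient() rather than creating their own instance.

// Sketch only; imports as in the new file above, plus java.util.Map.
void fetchRetentionRules(EmbeddedDruidCluster cluster) throws Exception
{
  EmbeddedServiceClient client = EmbeddedServiceClient.create(cluster);
  try {
    // Build the request against the leader Coordinator and deserialize the
    // response with that server's own ObjectMapper.
    Map<String, Object> rules = client.onLeaderCoordinator(
        mapper -> new RequestBuilder(HttpMethod.GET, "/druid/coordinator/v1/rules"),
        new TypeReference<Map<String, Object>>() {}
    );
    // use `rules` ...
  }
  finally {
    client.stop();  // shuts down the ScheduledExecutorService created in the constructor
  }
}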
diff --git a/services/src/test/java/org/apache/druid/testing/embedded/ServerReferenceHolder.java b/services/src/test/java/org/apache/druid/testing/embedded/ServerReferenceHolder.java
index 2e878621afe8..95966e8e3903 100644
--- a/services/src/test/java/org/apache/druid/testing/embedded/ServerReferenceHolder.java
+++ b/services/src/test/java/org/apache/druid/testing/embedded/ServerReferenceHolder.java
@@ -22,6 +22,7 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.inject.Inject;
 import com.google.inject.Injector;
+import com.google.inject.Key;
 import org.apache.druid.client.broker.BrokerClient;
 import org.apache.druid.client.coordinator.Coordinator;
 import org.apache.druid.client.coordinator.CoordinatorClient;
@@ -29,6 +30,7 @@
 import org.apache.druid.discovery.DruidLeaderSelector;
 import org.apache.druid.discovery.DruidNodeDiscoveryProvider;
 import org.apache.druid.guice.annotations.EscalatedGlobal;
+import org.apache.druid.guice.annotations.Global;
 import org.apache.druid.guice.annotations.Json;
 import org.apache.druid.guice.annotations.Self;
 import org.apache.druid.indexing.overlord.IndexerMetadataStorageCoordinator;
@@ -38,6 +40,7 @@
 import org.apache.druid.server.DruidNode;
 import org.apache.druid.server.metrics.LatchableEmitter;
 
+import java.lang.annotation.Annotation;
 import java.util.Objects;
 
 /**
@@ -78,7 +81,11 @@ public final class ServerReferenceHolder implements ServerReferencesProvider
 
   @Inject
   @EscalatedGlobal
-  private HttpClient httpClient;
+  private HttpClient escalatedHttpClient;
+
+  @Inject
+  @Global
+  private HttpClient globalHttpClient;
 
   @Self
   @Inject
@@ -154,7 +161,13 @@ public DruidNodeDiscoveryProvider nodeDiscovery()
   @Override
   public HttpClient escalatedHttpClient()
   {
-    return httpClient;
+    return escalatedHttpClient;
+  }
+
+  @Override
+  public HttpClient globalHttpClient()
+  {
+    return globalHttpClient;
   }
 
   @Override
@@ -168,4 +181,10 @@ public <T> T getInstance(Class<T> clazz)
   {
     return injector.getInstance(clazz);
   }
+
+  @Override
+  public <T> T getInstance(Class<T> type, Class<? extends Annotation> annotationType)
+  {
+    return injector.getInstance(Key.get(type, annotationType));
+  }
 }
diff --git a/services/src/test/java/org/apache/druid/testing/embedded/ServerReferencesProvider.java b/services/src/test/java/org/apache/druid/testing/embedded/ServerReferencesProvider.java
index ca24218e0fb5..c9d746d565bb 100644
--- a/services/src/test/java/org/apache/druid/testing/embedded/ServerReferencesProvider.java
+++ b/services/src/test/java/org/apache/druid/testing/embedded/ServerReferencesProvider.java
@@ -32,6 +32,8 @@
 import org.apache.druid.server.DruidNode;
 import org.apache.druid.server.metrics.LatchableEmitter;
 
+import java.lang.annotation.Annotation;
+
 /**
  * Provides a handle to the various objects used by an {@link EmbeddedDruidServer}
  * during an embedded cluster test. The returned references should be used for
@@ -95,6 +97,11 @@ public interface ServerReferencesProvider
    */
   HttpClient escalatedHttpClient();
 
+  /**
+   * Non-escalated {@link HttpClient} used by this server to communicate with other Druid servers.
+   */
+  HttpClient globalHttpClient();
+
   /**
    * {@link ObjectMapper} annotated with {@link Json}.
    */
@@ -105,4 +112,10 @@ public interface ServerReferencesProvider
    * The returned object must be used for read-only purposes.
    */
  <T> T getInstance(Class<T> clazz);
+
+  /**
+   * Gets the injected instance of the specified type, bound with the given annotation.
+   * The returned object must be used for read-only purposes.
+   */
+  <T> T getInstance(Class<T> clazz, Class<? extends Annotation> annotation);
 }
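The annotation-aware getInstance() added here is what lets EmbeddedServiceClient above pull annotation-bound objects (for example, the Overlord's ServiceLocator) out of a server's injector, and globalHttpClient() exposes a non-escalated client that a custom Escalator can wrap. A small hedged sketch, where `server` is any started EmbeddedDruidServer:

// Sketch only: mirrors how EmbeddedServiceClient resolves its dependencies.
void exampleBindingLookups(EmbeddedDruidServer<?> server)
{
  ServiceLocator overlordLocator =
      server.bindings().getInstance(ServiceLocator.class, IndexingService.class);
  HttpClient plainClient = server.bindings().globalHttpClient();
  // plainClient can be wrapped by a custom Escalator, as done in EmbeddedServiceClient.
}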
diff --git a/services/src/test/java/org/apache/druid/testing/embedded/indexing/Resources.java b/services/src/test/java/org/apache/druid/testing/embedded/indexing/Resources.java
index 7af3d3a7c7a2..9f6ae4713765 100644
--- a/services/src/test/java/org/apache/druid/testing/embedded/indexing/Resources.java
+++ b/services/src/test/java/org/apache/druid/testing/embedded/indexing/Resources.java
@@ -19,11 +19,36 @@
 
 package org.apache.druid.testing.embedded.indexing;
 
+import com.google.common.base.Throwables;
+import org.apache.druid.java.util.common.ISE;
+
+import java.io.File;
+import java.net.URL;
+
 /**
  * Constants and utility methods used in embedded cluster tests.
  */
 public class Resources
 {
+  /**
+   * Returns the {@link File} for the given local resource.
+   */
+  public static File getFileForResource(String resourceName)
+  {
+    final URL resourceUrl = DataFile.class.getClassLoader().getResource(resourceName);
+    if (resourceUrl == null) {
+      throw new ISE("Could not find resource file[%s]", resourceName);
+    }
+
+    try {
+      return new File(resourceUrl.toURI());
+    }
+    catch (Exception e) {
+      Throwables.throwIfUnchecked(e);
+      throw new RuntimeException(e);
+    }
+  }
+
   public static class InlineData
   {
     /**
@@ -67,9 +92,20 @@ public static class InlineData
 
   public static class DataFile
   {
-    public static final String TINY_WIKI_1_JSON = "data/json/tiny_wiki_1.json";
-    public static final String TINY_WIKI_2_JSON = "data/json/tiny_wiki_2.json";
-    public static final String TINY_WIKI_3_JSON = "data/json/tiny_wiki_3.json";
+    public static File tinyWiki1Json()
+    {
+      return getFileForResource("data/json/tiny_wiki_1.json");
+    }
+
+    public static File tinyWiki2Json()
+    {
+      return getFileForResource("data/json/tiny_wiki_2.json");
+    }
+
+    public static File tinyWiki3Json()
+    {
+      return getFileForResource("data/json/tiny_wiki_3.json");
+    }
   }
 
   /**