Skip to content

Commit 33c38af

Browse files
authored
Add DruidContainers to run docker tests with embedded-test framework (#18302)
Summary: - Support running Druid Docker containers in embedded tests - Use the Druid distribution image for these containers rather than a test-only image as previously done by ITs - Add a `CliEventCollector` to monitor the containers and write efficient tests - Run new job `docker-tests` in GHA which runs a few Docker-based tests
1 parent b8417a5 commit 33c38af

File tree

82 files changed

+2735
-1523
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

82 files changed

+2735
-1523
lines changed

.github/scripts/run_docker-tests

Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,37 @@
1+
#!/bin/bash

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Runs Docker tests in the embedded-tests module.
# Requires environment variable DRUID_DIST_IMAGE_NAME to name the Druid
# distribution image under test. Extra arguments are forwarded to mvn.

set -e

DRUID_IMAGE_NAME_ENV="DRUID_DIST_IMAGE_NAME"
DRUID_IMAGE_NAME=$DRUID_DIST_IMAGE_NAME

# System property read by the embedded-test framework to locate the image.
DRUID_IMAGE_SYS_PROPERTY="druid.testing.docker.image"

if [ -z "${DRUID_IMAGE_NAME}" ]; then
  echo "ERROR!! Environment variable [$DRUID_IMAGE_NAME_ENV] not set!"
  echo "Run 'export $DRUID_IMAGE_NAME_ENV=<druid-image-to-test>' to specify the image to use in the Docker tests."
  exit 1
else
  echo "Running Docker tests with image[$DRUID_IMAGE_NAME]"
fi

# Append with a separating space only when OPTS already has content, so a
# caller-supplied OPTS (e.g. OPTS="-X") does not fuse into a broken token.
OPTS="${OPTS:+$OPTS }-pl embedded-tests"

# Quote the -D definitions so an image name or profiler arg line containing
# whitespace stays a single mvn argument. $OPTS is intentionally unquoted to
# allow word splitting of multiple options.
mvn -B $OPTS verify -Pdocker-tests "-D$DRUID_IMAGE_SYS_PROPERTY=$DRUID_IMAGE_NAME" "-DjfrProfilerArgLine=$JFR_PROFILER_ARG_LINE" "$@"

.github/workflows/cron-job-its.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -88,7 +88,7 @@ jobs:
8888
strategy:
8989
fail-fast: false
9090
matrix:
91-
testing_group: [ query, query-retry, query-error, security, high-availability ]
91+
testing_group: [ query, query-retry, query-error, security ]
9292
uses: ./.github/workflows/reusable-standard-its.yml
9393
needs: build
9494
with:
@@ -109,7 +109,7 @@ jobs:
109109
with:
110110
build_jdk: 17
111111
runtime_jdk: 17
112-
testing_groups: -DexcludedGroups=batch-index,input-format,input-source,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,high-availability,custom-coordinator-duties
112+
testing_groups: -DexcludedGroups=batch-index,input-format,input-source,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,custom-coordinator-duties
113113
use_indexer: ${{ matrix.indexer }}
114114
group: other
115115

.github/workflows/docker-tests.yml

Lines changed: 84 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,84 @@
1+
# Licensed to the Apache Software Foundation (ASF) under one or more
2+
# contributor license agreements. See the NOTICE file distributed with
3+
# this work for additional information regarding copyright ownership.
4+
# The ASF licenses this file to You under the Apache License, Version 2.0
5+
# (the "License"); you may not use this file except in compliance with
6+
# the License. You may obtain a copy of the License at
7+
#
8+
# http://www.apache.org/licenses/LICENSE-2.0
9+
#
10+
# Unless required by applicable law or agreed to in writing, software
11+
# distributed under the License is distributed on an "AS IS" BASIS,
12+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
# See the License for the specific language governing permissions and
14+
# limitations under the License.
15+
16+
name: "Docker Tests using Distribution Image"
17+
on:
18+
workflow_call:
19+
20+
jobs:
21+
run-docker-tests:
22+
name: Run Docker tests
23+
runs-on: ubuntu-latest
24+
steps:
25+
- uses: actions/checkout@v4
26+
- name: Set Docker image env var
27+
run: echo "DRUID_DIST_IMAGE_NAME=apache/druid:docker-tests" >> $GITHUB_ENV
28+
- name: Build the Docker image
29+
run: DOCKER_BUILDKIT=1 docker build -t $DRUID_DIST_IMAGE_NAME -f distribution/docker/Dockerfile .
30+
- name: Save Docker image to archive
31+
run: |
32+
echo "Saving image $DRUID_DIST_IMAGE_NAME in archive druid-dist-container.tar.gz"
33+
docker save "$DRUID_DIST_IMAGE_NAME" | gzip > druid-dist-container.tar.gz
34+
- name: Stop and remove Druid Docker containers
35+
run: |
36+
echo "Force stopping all Druid containers and pruning"
37+
docker ps -aq --filter "ancestor=apache/druid" | xargs -r docker rm -f
38+
docker system prune -af --volumes
39+
- name: Load Docker image
40+
run: |
41+
docker load --input druid-dist-container.tar.gz
42+
docker images
43+
- name: Setup Java
44+
uses: actions/setup-java@v4
45+
with:
46+
distribution: 'zulu'
47+
java-version: 17
48+
cache: 'maven'
49+
- name: Run Docker tests
50+
id: run-it
51+
run: .github/scripts/run_docker-tests
52+
timeout-minutes: 60
53+
54+
- name: Collect docker logs on failure
55+
if: ${{ failure() && steps.run-it.conclusion == 'failure' }}
56+
run: |
57+
mkdir docker-logs
58+
for c in $(docker ps -a --format="{{.Names}}")
59+
do
60+
docker logs $c > ./docker-logs/$c.log
61+
done
62+
63+
- name: Tar docker logs
64+
if: ${{ failure() && steps.run-it.conclusion == 'failure' }}
65+
run: tar cvzf ./docker-logs.tgz ./docker-logs
66+
67+
- name: Upload docker logs to GitHub
68+
if: ${{ failure() && steps.run-it.conclusion == 'failure' }}
69+
uses: actions/upload-artifact@v4
70+
with:
71+
name: failure-docker-logs
72+
path: docker-logs.tgz
73+
74+
- name: Collect surefire reports on failure
75+
if: ${{ failure() && steps.run-it.conclusion == 'failure' }}
76+
run: |
77+
tar cvzf ./surefire-logs.tgz ./embedded-tests/target/surefire-reports
78+
79+
- name: Upload surefire reports to GitHub
80+
if: ${{ failure() && steps.run-it.conclusion == 'failure' }}
81+
uses: actions/upload-artifact@v4
82+
with:
83+
name: failure-surefire-logs
84+
path: surefire-logs.tgz

.github/workflows/revised-its.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ jobs:
6767
fail-fast: false
6868
matrix:
6969
jdk: [17]
70-
it: [HighAvailability, MultiStageQuery, Catalog, BatchIndex, MultiStageQueryWithMM, InputSource, InputFormat, Query, DruidExactCountBitmap]
70+
it: [MultiStageQuery, Catalog, BatchIndex, MultiStageQueryWithMM, InputSource, InputFormat, Query, DruidExactCountBitmap]
7171
indexer: [middleManager]
7272
uses: ./.github/workflows/reusable-revised-its.yml
7373
if: ${{ needs.changes.outputs.core == 'true' || needs.changes.outputs.common-extensions == 'true' }}

.github/workflows/standard-its.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -78,7 +78,7 @@ jobs:
7878
strategy:
7979
fail-fast: false
8080
matrix:
81-
testing_group: [query, query-retry, query-error, security, high-availability, centralized-datasource-schema]
81+
testing_group: [query, query-retry, query-error, security, centralized-datasource-schema]
8282
uses: ./.github/workflows/reusable-standard-its.yml
8383
if: ${{ needs.changes.outputs.core == 'true' || needs.changes.outputs.common-extensions == 'true' }}
8484
with:
@@ -181,6 +181,6 @@ jobs:
181181
with:
182182
build_jdk: 17
183183
runtime_jdk: 17
184-
testing_groups: -DexcludedGroups=batch-index,input-format,input-source,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,high-availability,custom-coordinator-duties,centralized-datasource-schema,cds-task-schema-publish-disabled,cds-coordinator-metadata-query-disabled
184+
testing_groups: -DexcludedGroups=batch-index,input-format,input-source,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,custom-coordinator-duties,centralized-datasource-schema,cds-task-schema-publish-disabled,cds-coordinator-metadata-query-disabled
185185
use_indexer: ${{ matrix.indexer }}
186186
group: other

.github/workflows/unit-and-integration-tests-unified.yml

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -174,8 +174,12 @@ jobs:
174174
DRUID_PREVIOUS_VERSION_DOWNLOAD_URL: ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_VERSION_DOWNLOAD_URL }}
175175
DRUID_PREVIOUS_IT_IMAGE_NAME: ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}
176176

177+
docker-tests:
178+
needs: [build, unit-tests]
179+
uses: ./.github/workflows/docker-tests.yml
180+
177181
actions-timeline:
178-
needs: [build, unit-tests, revised-its, standard-its]
182+
needs: [build, unit-tests, revised-its, standard-its, docker-tests]
179183
runs-on: ubuntu-latest
180184
if: ${{ !cancelled() }}
181185
steps:

distribution/docker/druid.sh

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -129,6 +129,13 @@ SCONFIG=$(eval echo \$$(echo $SCONFIG))
129129

130130
if [ -n "${SCONFIG}" ]
131131
then
132+
# Create service conf directory as it may not exist for custom node roles
133+
if [ ! -d "$SERVICE_CONF_DIR" ]
134+
then
135+
echo "Creating conf directory '$SERVICE_CONF_DIR'"
136+
mkdir -p $SERVICE_CONF_DIR
137+
fi
138+
132139
cp -f "${SCONFIG}" $SERVICE_CONF_DIR/runtime.properties
133140
fi
134141

embedded-tests/pom.xml

Lines changed: 80 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -80,12 +80,28 @@
8080
<version>${project.parent.version}</version>
8181
<scope>test</scope>
8282
</dependency>
83+
<dependency>
84+
<groupId>org.apache.druid.extensions</groupId>
85+
<artifactId>postgresql-metadata-storage</artifactId>
86+
<version>${project.parent.version}</version>
87+
<scope>test</scope>
88+
</dependency>
8389
<dependency>
8490
<groupId>org.apache.druid.extensions</groupId>
8591
<artifactId>druid-s3-extensions</artifactId>
8692
<version>${project.parent.version}</version>
8793
<scope>test</scope>
8894
</dependency>
95+
<dependency>
96+
<groupId>org.apache.druid.extensions</groupId>
97+
<artifactId>druid-testcontainers</artifactId>
98+
<version>${project.parent.version}</version>
99+
</dependency>
100+
<dependency>
101+
<groupId>org.apache.druid.extensions</groupId>
102+
<artifactId>druid-testing-tools</artifactId>
103+
<version>${project.parent.version}</version>
104+
</dependency>
89105
<dependency>
90106
<groupId>org.apache.druid.extensions</groupId>
91107
<artifactId>druid-basic-security</artifactId>
@@ -100,6 +116,14 @@
100116
<groupId>com.google.guava</groupId>
101117
<artifactId>guava</artifactId>
102118
</dependency>
119+
<dependency>
120+
<groupId>com.google.inject</groupId>
121+
<artifactId>guice</artifactId>
122+
</dependency>
123+
<dependency>
124+
<groupId>org.apache.httpcomponents</groupId>
125+
<artifactId>httpclient</artifactId>
126+
</dependency>
103127
<dependency>
104128
<groupId>io.netty</groupId>
105129
<artifactId>netty</artifactId>
@@ -215,6 +239,11 @@
215239
<groupId>junit</groupId>
216240
<artifactId>junit</artifactId>
217241
</dependency>
242+
<dependency>
243+
<groupId>org.junit.platform</groupId>
244+
<artifactId>junit-platform-launcher</artifactId>
245+
<scope>test</scope>
246+
</dependency>
218247
<dependency>
219248
<groupId>org.hamcrest</groupId>
220249
<artifactId>hamcrest-all</artifactId>
@@ -268,6 +297,12 @@
268297
<version>${testcontainers.version}</version>
269298
<scope>test</scope>
270299
</dependency>
300+
<dependency>
301+
<groupId>org.testcontainers</groupId>
302+
<artifactId>postgresql</artifactId>
303+
<version>${testcontainers.version}</version>
304+
<scope>test</scope>
305+
</dependency>
271306
<dependency>
272307
<groupId>org.testcontainers</groupId>
273308
<artifactId>kafka</artifactId>
@@ -313,6 +348,51 @@
313348
<skip>true</skip>
314349
</configuration>
315350
</plugin>
351+
<!-- Do not run Docker tests except with the docker-tests profile -->
352+
<plugin>
353+
<groupId>org.apache.maven.plugins</groupId>
354+
<artifactId>maven-surefire-plugin</artifactId>
355+
<configuration>
356+
<excludedGroups>docker-test</excludedGroups>
357+
</configuration>
358+
</plugin>
316359
</plugins>
317360
</build>
361+
362+
<profiles>
363+
<profile>
364+
<id>docker-tests</id>
365+
<build>
366+
<plugins>
367+
<plugin>
368+
<groupId>org.apache.maven.plugins</groupId>
369+
<artifactId>maven-surefire-plugin</artifactId>
370+
<configuration>
371+
<skipTests>true</skipTests>
372+
</configuration>
373+
</plugin>
374+
<plugin>
375+
<groupId>org.apache.maven.plugins</groupId>
376+
<artifactId>maven-failsafe-plugin</artifactId>
377+
<configuration>
378+
<groups>docker-test</groups>
379+
<includes>
380+
<!-- Search for Docker tests instead of just IT*/*IT -->
381+
<include>*DockerTest*</include>
382+
</includes>
383+
<redirectTestOutputToFile>true</redirectTestOutputToFile>
384+
</configuration>
385+
<executions>
386+
<execution>
387+
<goals>
388+
<goal>integration-test</goal>
389+
<goal>verify</goal>
390+
</goals>
391+
</execution>
392+
</executions>
393+
</plugin>
394+
</plugins>
395+
</build>
396+
</profile>
397+
</profiles>
318398
</project>

embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/AutoCompactionTest.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -112,7 +112,7 @@
112112
public class AutoCompactionTest extends CompactionTestBase
113113
{
114114
private static final Logger LOG = new Logger(AutoCompactionTest.class);
115-
private static final Supplier<TaskBuilder.Index> INDEX_TASK = MoreResources.Task.BASIC_INDEX;
115+
private static final Supplier<TaskBuilder.Index> INDEX_TASK = MoreResources.Task.INDEX_TASK_WITH_AGGREGATORS;
116116

117117
private static final Supplier<TaskBuilder.Index> INDEX_TASK_WITH_GRANULARITY_SPEC =
118118
() -> INDEX_TASK.get().dimensions("language").dynamicPartitionWithMaxRows(10);

embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/CompactionTaskTest.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@
6161

6262
public class CompactionTaskTest extends CompactionTestBase
6363
{
64-
private static final Supplier<TaskBuilder.Index> INDEX_TASK = MoreResources.Task.BASIC_INDEX;
64+
private static final Supplier<TaskBuilder.Index> INDEX_TASK = MoreResources.Task.INDEX_TASK_WITH_AGGREGATORS;
6565

6666
private static final List<Pair<String, String>> INDEX_QUERIES_RESOURCE = List.of(
6767
Pair.of(Resources.Query.SELECT_MIN_MAX_TIME, "2013-08-31T01:02:33.000Z,2013-09-01T12:41:27.000Z"),
@@ -102,7 +102,7 @@ public class CompactionTaskTest extends CompactionTestBase
102102
.ioConfig(new CompactionIntervalSpec(Intervals.of("2013-08-31/2013-09-02"), null), true);
103103

104104
private static final Supplier<TaskBuilder.Index> INDEX_TASK_WITH_TIMESTAMP =
105-
() -> MoreResources.Task.BASIC_INDEX.get().dimensions(
105+
() -> MoreResources.Task.INDEX_TASK_WITH_AGGREGATORS.get().dimensions(
106106
"page",
107107
"language", "user", "unpatrolled", "newPage", "robot", "anonymous",
108108
"namespace", "continent", "country", "region", "city", "timestamp"

0 commit comments

Comments
 (0)