Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
a7698e3
Add DruidContainers to run docker tests with embedded-test framework
kfaraz Jul 21, 2025
bfd4f6a
Use correct image versions
kfaraz Jul 21, 2025
3508e64
Clean up workflow
kfaraz Jul 28, 2025
099c47b
Simplify DruidContainerResource
kfaraz Jul 28, 2025
a926d9c
Merge branch 'master' of github.com:apache/druid into add_druid_conta…
kfaraz Jul 29, 2025
16b649e
Use URIBuilder for URIs, simplify deps
kfaraz Jul 29, 2025
7965826
Use Arrays.toString
kfaraz Jul 29, 2025
2006e4f
Merge branch 'master' of github.com:apache/druid into add_druid_conta…
kfaraz Jul 29, 2025
d35c36b
Add CliEventCollector as a custom node role
kfaraz Jul 30, 2025
e9eec86
Remove CliCustomNodeRole from revised ITs
kfaraz Jul 30, 2025
2678c69
Add profile to run docker tests, skip them by default
kfaraz Jul 31, 2025
ac3df1e
Add more verifications
kfaraz Jul 31, 2025
2ec86f4
Add PostgreSQLMetadataResource
kfaraz Aug 1, 2025
a0b7b63
Clean up
kfaraz Aug 1, 2025
3654904
Merge branch 'master' of github.com:apache/druid into add_druid_conta…
kfaraz Aug 1, 2025
14e3c6b
Add class EmbeddedHostname
kfaraz Aug 1, 2025
a9c1b23
Merge branch 'master' of github.com:apache/druid into add_druid_conta…
kfaraz Aug 2, 2025
28c8dab
Fix compilation
kfaraz Aug 2, 2025
bf77c43
Remove event collector from DruidCommand
kfaraz Aug 4, 2025
8ae997a
Comment fixes
kfaraz Aug 6, 2025
e81c1f8
Add IngestionSmokeTest, always run backward compat test
kfaraz Aug 7, 2025
35c8d40
Merge branch 'master' of github.com:apache/druid into add_druid_conta…
kfaraz Aug 7, 2025
aba2e27
Remove conflicts
kfaraz Aug 7, 2025
7178779
Fix typo in docker test workflow
kfaraz Aug 7, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
37 changes: 37 additions & 0 deletions .github/scripts/run_docker-tests
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
#!/bin/bash

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Runs Docker tests for the embedded-tests module.
# Requires environment variable DRUID_DIST_IMAGE_NAME to name the Druid
# image under test. Any extra arguments are forwarded to mvn.

# Fail fast on errors and on use of unset variables.
set -eu

DRUID_IMAGE_NAME_ENV="DRUID_DIST_IMAGE_NAME"
# Default to empty so the unset case is reported below instead of
# tripping 'set -u'.
DRUID_IMAGE_NAME="${DRUID_DIST_IMAGE_NAME:-}"

# System property read by the tests to locate the Docker image.
DRUID_IMAGE_SYS_PROPERTY="druid.testing.docker.image"

if [ -z "${DRUID_IMAGE_NAME}" ]; then
  echo "ERROR!! Environment variable [$DRUID_IMAGE_NAME_ENV] not set!"
  echo "Run 'export $DRUID_IMAGE_NAME_ENV=<druid-image-to-test>' to specify the image to use in the Docker tests."
  exit 1
else
  echo "Running Docker tests with image[$DRUID_IMAGE_NAME]"
fi

# Use an array (not an unquoted string) so options survive word
# splitting intact, and quote every -D value: image names and profiler
# arg lines may contain characters the shell would otherwise split on.
MVN_OPTS=(-pl embedded-tests)
mvn -B "${MVN_OPTS[@]}" verify -Pdocker-tests \
  "-D${DRUID_IMAGE_SYS_PROPERTY}=${DRUID_IMAGE_NAME}" \
  "-DjfrProfilerArgLine=${JFR_PROFILER_ARG_LINE:-}" \
  "$@"
4 changes: 2 additions & 2 deletions .github/workflows/cron-job-its.yml
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ jobs:
strategy:
fail-fast: false
matrix:
testing_group: [ query, query-retry, query-error, security, high-availability ]
testing_group: [ query, query-retry, query-error, security ]
uses: ./.github/workflows/reusable-standard-its.yml
needs: build
with:
Expand All @@ -109,7 +109,7 @@ jobs:
with:
build_jdk: 17
runtime_jdk: 17
testing_groups: -DexcludedGroups=batch-index,input-format,input-source,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,high-availability,custom-coordinator-duties
testing_groups: -DexcludedGroups=batch-index,input-format,input-source,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,custom-coordinator-duties
use_indexer: ${{ matrix.indexer }}
group: other

Expand Down
84 changes: 84 additions & 0 deletions .github/workflows/docker-tests.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Builds the Druid distribution Docker image and runs the Docker-based
# embedded tests against it. Invoked from other workflows via workflow_call.
name: "Docker Tests using Distribution Image"
on:
  workflow_call:

jobs:
  run-docker-tests:
    name: Run Docker tests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set Docker image env var
        run: echo "DRUID_DIST_IMAGE_NAME=apache/druid:docker-tests" >> $GITHUB_ENV
      - name: Build the Docker image
        run: DOCKER_BUILDKIT=1 docker build -t $DRUID_DIST_IMAGE_NAME -f distribution/docker/Dockerfile .
      - name: Save Docker image to archive
        run: |
          echo "Saving image $DRUID_DIST_IMAGE_NAME in archive druid-dist-container.tar.gz"
          docker save "$DRUID_DIST_IMAGE_NAME" | gzip > druid-dist-container.tar.gz
      # Free up disk and ensure no stale Druid containers interfere with the run.
      - name: Stop and remove Druid Docker containers
        run: |
          echo "Force stopping all Druid containers and pruning"
          docker ps -aq --filter "ancestor=apache/druid" | xargs -r docker rm -f
          docker system prune -af --volumes
      - name: Load Docker image
        run: |
          docker load --input druid-dist-container.tar.gz
          docker images
      - name: Setup Java
        uses: actions/setup-java@v4
        with:
          distribution: 'zulu'
          java-version: 17
          cache: 'maven'
      - name: Run Docker tests
        id: run-it
        run: .github/scripts/run_docker-tests
        timeout-minutes: 60

      # The remaining steps collect diagnostics only when the test step failed.
      - name: Collect docker logs on failure
        if: ${{ failure() && steps.run-it.conclusion == 'failure' }}
        run: |
          mkdir docker-logs
          for c in $(docker ps -a --format="{{.Names}}")
          do
            docker logs $c > ./docker-logs/$c.log
          done

      - name: Tar docker logs
        if: ${{ failure() && steps.run-it.conclusion == 'failure' }}
        run: tar cvzf ./docker-logs.tgz ./docker-logs

      - name: Upload docker logs to GitHub
        if: ${{ failure() && steps.run-it.conclusion == 'failure' }}
        uses: actions/upload-artifact@v4
        with:
          name: failure-docker-logs
          path: docker-logs.tgz

      - name: Collect surefire reports on failure
        if: ${{ failure() && steps.run-it.conclusion == 'failure' }}
        run: |
          tar cvzf ./surefire-logs.tgz ./embedded-tests/target/surefire-reports

      - name: Upload surefire reports to GitHub
        if: ${{ failure() && steps.run-it.conclusion == 'failure' }}
        uses: actions/upload-artifact@v4
        with:
          name: failure-surefire-logs
          path: surefire-logs.tgz
2 changes: 1 addition & 1 deletion .github/workflows/revised-its.yml
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ jobs:
fail-fast: false
matrix:
jdk: [17]
it: [HighAvailability, MultiStageQuery, Catalog, BatchIndex, MultiStageQueryWithMM, InputSource, InputFormat, Query, DruidExactCountBitmap]
it: [MultiStageQuery, Catalog, BatchIndex, MultiStageQueryWithMM, InputSource, InputFormat, Query, DruidExactCountBitmap]
indexer: [middleManager]
uses: ./.github/workflows/reusable-revised-its.yml
if: ${{ needs.changes.outputs.core == 'true' || needs.changes.outputs.common-extensions == 'true' }}
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/standard-its.yml
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ jobs:
strategy:
fail-fast: false
matrix:
testing_group: [query, query-retry, query-error, security, high-availability, centralized-datasource-schema]
testing_group: [query, query-retry, query-error, security, centralized-datasource-schema]
uses: ./.github/workflows/reusable-standard-its.yml
if: ${{ needs.changes.outputs.core == 'true' || needs.changes.outputs.common-extensions == 'true' }}
with:
Expand Down Expand Up @@ -181,6 +181,6 @@ jobs:
with:
build_jdk: 17
runtime_jdk: 17
testing_groups: -DexcludedGroups=batch-index,input-format,input-source,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,high-availability,custom-coordinator-duties,centralized-datasource-schema,cds-task-schema-publish-disabled,cds-coordinator-metadata-query-disabled
testing_groups: -DexcludedGroups=batch-index,input-format,input-source,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,custom-coordinator-duties,centralized-datasource-schema,cds-task-schema-publish-disabled,cds-coordinator-metadata-query-disabled
use_indexer: ${{ matrix.indexer }}
group: other
6 changes: 5 additions & 1 deletion .github/workflows/unit-and-integration-tests-unified.yml
Original file line number Diff line number Diff line change
Expand Up @@ -174,8 +174,12 @@ jobs:
DRUID_PREVIOUS_VERSION_DOWNLOAD_URL: ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_VERSION_DOWNLOAD_URL }}
DRUID_PREVIOUS_IT_IMAGE_NAME: ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}

docker-tests:
needs: [build, unit-tests]
uses: ./.github/workflows/docker-tests.yml

actions-timeline:
needs: [build, unit-tests, revised-its, standard-its]
needs: [build, unit-tests, revised-its, standard-its, docker-tests]
runs-on: ubuntu-latest
if: ${{ !cancelled() }}
steps:
Expand Down
7 changes: 7 additions & 0 deletions distribution/docker/druid.sh
Original file line number Diff line number Diff line change
Expand Up @@ -129,6 +129,13 @@ SCONFIG=$(eval echo \$$(echo $SCONFIG))

if [ -n "${SCONFIG}" ]
then
# Create service conf directory as it may not exist for custom node roles
if [ ! -d "$SERVICE_CONF_DIR" ]
then
echo "Creating conf directory '$SERVICE_CONF_DIR'"
mkdir -p $SERVICE_CONF_DIR
fi

cp -f "${SCONFIG}" $SERVICE_CONF_DIR/runtime.properties
fi

Expand Down
80 changes: 80 additions & 0 deletions embedded-tests/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -80,12 +80,28 @@
<version>${project.parent.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.druid.extensions</groupId>
<artifactId>postgresql-metadata-storage</artifactId>
<version>${project.parent.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.druid.extensions</groupId>
<artifactId>druid-s3-extensions</artifactId>
<version>${project.parent.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.druid.extensions</groupId>
<artifactId>druid-testcontainers</artifactId>
<version>${project.parent.version}</version>
</dependency>
<dependency>
<groupId>org.apache.druid.extensions</groupId>
<artifactId>druid-testing-tools</artifactId>
<version>${project.parent.version}</version>
</dependency>
<dependency>
<groupId>org.apache.druid.extensions</groupId>
<artifactId>druid-basic-security</artifactId>
Expand All @@ -100,6 +116,14 @@
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</dependency>
<dependency>
<groupId>com.google.inject</groupId>
<artifactId>guice</artifactId>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty</artifactId>
Expand Down Expand Up @@ -215,6 +239,11 @@
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</dependency>
<dependency>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-launcher</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-all</artifactId>
Expand Down Expand Up @@ -268,6 +297,12 @@
<version>${testcontainers.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>postgresql</artifactId>
<version>${testcontainers.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>kafka</artifactId>
Expand Down Expand Up @@ -313,6 +348,51 @@
<skip>true</skip>
</configuration>
</plugin>
<!-- Do not run Docker tests except with the docker-tests profile -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<excludedGroups>docker-test</excludedGroups>
</configuration>
</plugin>
</plugins>
</build>

<profiles>
<profile>
<id>docker-tests</id>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<skipTests>true</skipTests>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<configuration>
<groups>docker-test</groups>
<includes>
<!-- Search for Docker tests instead of just IT*/*IT -->
<include>*DockerTest*</include>
</includes>
<redirectTestOutputToFile>true</redirectTestOutputToFile>
</configuration>
<executions>
<execution>
<goals>
<goal>integration-test</goal>
<goal>verify</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,7 @@
public class AutoCompactionTest extends CompactionTestBase
{
private static final Logger LOG = new Logger(AutoCompactionTest.class);
private static final Supplier<TaskBuilder.Index> INDEX_TASK = MoreResources.Task.BASIC_INDEX;
private static final Supplier<TaskBuilder.Index> INDEX_TASK = MoreResources.Task.INDEX_TASK_WITH_AGGREGATORS;

private static final Supplier<TaskBuilder.Index> INDEX_TASK_WITH_GRANULARITY_SPEC =
() -> INDEX_TASK.get().dimensions("language").dynamicPartitionWithMaxRows(10);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@

public class CompactionTaskTest extends CompactionTestBase
{
private static final Supplier<TaskBuilder.Index> INDEX_TASK = MoreResources.Task.BASIC_INDEX;
private static final Supplier<TaskBuilder.Index> INDEX_TASK = MoreResources.Task.INDEX_TASK_WITH_AGGREGATORS;

private static final List<Pair<String, String>> INDEX_QUERIES_RESOURCE = List.of(
Pair.of(Resources.Query.SELECT_MIN_MAX_TIME, "2013-08-31T01:02:33.000Z,2013-09-01T12:41:27.000Z"),
Expand Down Expand Up @@ -102,7 +102,7 @@ public class CompactionTaskTest extends CompactionTestBase
.ioConfig(new CompactionIntervalSpec(Intervals.of("2013-08-31/2013-09-02"), null), true);

private static final Supplier<TaskBuilder.Index> INDEX_TASK_WITH_TIMESTAMP =
() -> MoreResources.Task.BASIC_INDEX.get().dimensions(
() -> MoreResources.Task.INDEX_TASK_WITH_AGGREGATORS.get().dimensions(
"page",
"language", "user", "unpatrolled", "newPage", "robot", "anonymous",
"namespace", "continent", "country", "region", "city", "timestamp"
Expand Down
Loading