diff --git a/.asf.yaml b/.asf.yaml
index 5a7f5ff722117..5d88fe28742fb 100644
--- a/.asf.yaml
+++ b/.asf.yaml
@@ -29,16 +29,16 @@ notifications:
# Read more here: https://github.com/apache/infrastructure-asfyaml
github:
collaborators:
- - brandboat
- - FrankYang0529
- - gongxuanzhang
- m1a2st
- - mingyen066
- - ShivsundarR
- smjn
- TaiJuWu
- - xijiu
+ - brandboat
- Yunyung
+ - xijiu
+ - chirag-wadhwa5
+ - mingyen066
+ - ShivsundarR
+ - Rancho-7
enabled_merge_buttons:
squash: true
squash_commit_message: PR_TITLE_AND_DESC
diff --git a/.github/actions/run-gradle/action.yml b/.github/actions/run-gradle/action.yml
index 8422defb273cf..9c8e0945184bb 100644
--- a/.github/actions/run-gradle/action.yml
+++ b/.github/actions/run-gradle/action.yml
@@ -83,6 +83,9 @@ runs:
RUN_FLAKY_TESTS: ${{ inputs.run-flaky-tests }}
TEST_XML_OUTPUT_DIR: ${{ inputs.test-xml-output }}
TEST_VERBOSE: ${{ inputs.test-verbose }}
+      # This build step is invoked by build.yml to run JUnit tests only.
+      # Spotbugs is already run by that workflow via the "check" task, so
+      # running it again here would be redundant.
run: |
set +e
./.github/scripts/thread-dump.sh &
@@ -97,6 +100,8 @@ runs:
-Pkafka.cluster.test.repeat=$TEST_REPEAT \
-Pkafka.test.verbose=$TEST_VERBOSE \
-PcommitId=xxxxxxxxxxxxxxxx \
+ -x spotbugsMain \
+ -x spotbugsTest \
$TEST_TASK
exitcode="$?"
echo "exitcode=$exitcode" >> $GITHUB_OUTPUT
diff --git a/.github/actions/setup-gradle/action.yml b/.github/actions/setup-gradle/action.yml
index 1a5b0902703ab..fe456568066e6 100644
--- a/.github/actions/setup-gradle/action.yml
+++ b/.github/actions/setup-gradle/action.yml
@@ -37,7 +37,7 @@ runs:
using: "composite"
steps:
- name: Setup Java
- uses: actions/setup-java@v4
+ uses: actions/setup-java@v5
with:
distribution: temurin
java-version: ${{ inputs.java-version }}
diff --git a/.github/actions/setup-python/action.yml b/.github/actions/setup-python/action.yml
index d7e326314c1be..10c55f6e083b5 100644
--- a/.github/actions/setup-python/action.yml
+++ b/.github/actions/setup-python/action.yml
@@ -22,7 +22,7 @@ runs:
using: "composite"
steps:
- name: Setup Python
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: 3.12
- name: Pip install
diff --git a/.github/configs/labeler.yml b/.github/configs/labeler.yml
index 24a7a643c9042..b988967f33788 100644
--- a/.github/configs/labeler.yml
+++ b/.github/configs/labeler.yml
@@ -92,6 +92,12 @@ transactions:
- any-glob-to-any-file:
- 'transaction-coordinator/**'
+group-coordinator:
+ - changed-files:
+ - any-glob-to-any-file:
+ - 'group-coordinator/**'
+ - 'coordinator-common/**'
+
kip-932:
- changed-files:
- any-glob-to-any-file:
diff --git a/.github/scripts/junit.py b/.github/scripts/junit.py
index 95b5d4e4610de..550ea4935116d 100644
--- a/.github/scripts/junit.py
+++ b/.github/scripts/junit.py
@@ -361,7 +361,7 @@ def split_report_path(base_path: str, report_path: str) -> Tuple[str, str]:
failure_messages.append(f"Gradle task had a failure exit code. Failing this script.")
if thread_dump_url:
- failure_messages.append(f"Thread dump available at {thread_dump_url}. Failing this script.")
+ failure_messages.append(f"Thread dump available at {thread_dump_url} and the script will now fail.")
if junit_report_url:
report_md = f"Download [JUnit HTML report]({junit_report_url})"
diff --git a/.github/scripts/pr-format.py b/.github/scripts/pr-format.py
index 7793f8c8378db..d2da5e3e5bff3 100644
--- a/.github/scripts/pr-format.py
+++ b/.github/scripts/pr-format.py
@@ -97,6 +97,8 @@ def split_paragraphs(text: str):
else:
if line[0] in ("#", "*", "-", "=") or line[0].isdigit():
markdown = True
+ if "```" in line:
+ markdown = True
paragraph.append(line)
yield paragraph, markdown
diff --git a/.github/scripts/requirements.txt b/.github/scripts/requirements.txt
index d59455f79dac6..d3fcf50bb7400 100644
--- a/.github/scripts/requirements.txt
+++ b/.github/scripts/requirements.txt
@@ -12,6 +12,8 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+
+# Note: Ensure the 'requests' version here matches the version in tests/setup.py
PyYAML~=6.0
pytz==2024.2
-requests==2.32.3
+requests==2.32.4
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index fc5ee1bd7558b..47bb2cbc31d5d 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -66,12 +66,12 @@ jobs:
name: Load Test Catalog
steps:
- name: Checkout main
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
with:
persist-credentials: false
- name: Checkout test-catalog
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
with:
ref: 'test-catalog'
persist-credentials: false
@@ -118,7 +118,7 @@ jobs:
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
- name: Checkout code
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
with:
persist-credentials: false
ref: ${{ github.sha }} # this is the default, just being explicit.
@@ -127,7 +127,7 @@ jobs:
- name: Setup Gradle
uses: ./.github/actions/setup-gradle
with:
- java-version: 23
+ java-version: 24
gradle-cache-read-only: ${{ !inputs.is-trunk }}
gradle-cache-write-only: ${{ inputs.is-trunk }}
develocity-access-key: ${{ secrets.DEVELOCITY_ACCESS_KEY }}
@@ -181,7 +181,7 @@ jobs:
fail-fast: false
matrix:
# If we change these, make sure to adjust ci-complete.yml
- java: [ 23, 17 ]
+ java: [ 24, 17 ]
run-flaky: [ true, false ]
run-new: [ true, false ]
exclude:
@@ -192,7 +192,7 @@ jobs:
name: JUnit tests Java ${{ matrix.java }}${{ matrix.run-flaky == true && ' (flaky)' || '' }}${{ matrix.run-new == true && ' (new)' || '' }}
steps:
- name: Checkout code
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
with:
persist-credentials: false
ref: ${{ needs.configure.outputs.sha }}
@@ -210,7 +210,7 @@ jobs:
# the overall workflow, so we'll continue here without a test catalog.
- name: Load Test Catalog
id: load-test-catalog
- uses: actions/download-artifact@v4
+ uses: actions/download-artifact@v5
continue-on-error: true
with:
name: combined-test-catalog
@@ -270,7 +270,7 @@ jobs:
python .github/scripts/junit.py \
--path build/junit-xml >> $GITHUB_STEP_SUMMARY
- # This job downloads all the JUnit XML files and thread dumps from the JDK 23 test runs.
+ # This job downloads all the JUnit XML files and thread dumps from the JDK 24 test runs.
# If any test job fails, we will not run this job. Also, if any thread dump artifacts
# are present, this means there was a timeout in the tests and so we will not proceed
# with catalog creation.
@@ -282,13 +282,13 @@ jobs:
uploaded-test-catalog: ${{ steps.archive-test-catalog.outcome == 'success' }}
steps:
- name: Checkout code
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
with:
persist-credentials: false
- name: Download Thread Dumps
- uses: actions/download-artifact@v4
+ uses: actions/download-artifact@v5
with:
- pattern: junit-thread-dumps-23-*
+ pattern: junit-thread-dumps-24-*
path: thread-dumps
merge-multiple: true
- name: Check For Thread Dump
@@ -300,9 +300,9 @@ jobs:
exit 1;
fi
- name: Download JUnit XMLs
- uses: actions/download-artifact@v4
+ uses: actions/download-artifact@v5
with:
- pattern: junit-xml-23-* # Only look at JDK 23 tests for the test catalog
+ pattern: junit-xml-24-* # Only look at JDK 24 tests for the test catalog
path: junit-xml
merge-multiple: true
- name: Collate Test Catalog
@@ -334,7 +334,7 @@ jobs:
contents: write
steps:
- name: Checkout Test Catalog
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
with:
persist-credentials: true # Needed to commit and push later
ref: test-catalog
@@ -342,7 +342,7 @@ jobs:
run: |
rm -rf test-catalog
- name: Download Test Catalog
- uses: actions/download-artifact@v4
+ uses: actions/download-artifact@v5
with:
name: test-catalog
path: test-catalog
diff --git a/.github/workflows/ci-complete.yml b/.github/workflows/ci-complete.yml
index 8855c998df28b..6b8492fb7c0ab 100644
--- a/.github/workflows/ci-complete.yml
+++ b/.github/workflows/ci-complete.yml
@@ -44,7 +44,7 @@ jobs:
fail-fast: false
matrix:
# Make sure these match build.yml
- java: [ 23, 17 ]
+ java: [ 24, 17 ]
run-flaky: [ true, false ]
run-new: [ true, false ]
exclude:
@@ -61,7 +61,7 @@ jobs:
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
- name: Checkout code
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
with:
persist-credentials:
false
@@ -72,7 +72,7 @@ jobs:
develocity-access-key: ${{ secrets.DEVELOCITY_ACCESS_KEY }}
- name: Download build scan archive
id: download-build-scan
- uses: actions/download-artifact@v4
+ uses: actions/download-artifact@v5
continue-on-error: true # Don't want this step to fail the overall workflow
with:
github-token: ${{ github.token }}
diff --git a/.github/workflows/deflake.yml b/.github/workflows/deflake.yml
index f58408a04739d..3a2fbb56345b2 100644
--- a/.github/workflows/deflake.yml
+++ b/.github/workflows/deflake.yml
@@ -42,7 +42,7 @@ jobs:
name: Deflake JUnit tests
steps:
- name: Checkout code
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
with:
persist-credentials: false
diff --git a/.github/workflows/docker_build_and_test.yml b/.github/workflows/docker_build_and_test.yml
index 67acdf9fb7424..6a1b2f7de25f1 100644
--- a/.github/workflows/docker_build_and_test.yml
+++ b/.github/workflows/docker_build_and_test.yml
@@ -32,9 +32,9 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
- name: Set up Python 3.10
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: "3.10"
- name: Setup Docker Compose
diff --git a/.github/workflows/docker_official_image_build_and_test.yml b/.github/workflows/docker_official_image_build_and_test.yml
index 58866a19d6cab..1580ea1f744ba 100644
--- a/.github/workflows/docker_official_image_build_and_test.yml
+++ b/.github/workflows/docker_official_image_build_and_test.yml
@@ -31,9 +31,9 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
- name: Set up Python 3.10
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: "3.10"
- name: Setup Docker Compose
diff --git a/.github/workflows/docker_rc_release.yml b/.github/workflows/docker_rc_release.yml
index 1f824b39b977a..da851f4a43028 100644
--- a/.github/workflows/docker_rc_release.yml
+++ b/.github/workflows/docker_rc_release.yml
@@ -37,9 +37,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
- name: Set up Python 3.10
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: "3.10"
- name: Install dependencies
diff --git a/.github/workflows/docker_scan.yml b/.github/workflows/docker_scan.yml
index db729c5c3ae01..55df9f65e4c08 100644
--- a/.github/workflows/docker_scan.yml
+++ b/.github/workflows/docker_scan.yml
@@ -26,7 +26,7 @@ jobs:
strategy:
matrix:
# This is an array of supported tags. Make sure this array only contains the supported tags
- supported_image_tag: ['latest', '3.7.2', '3.8.1', '3.9.0', '4.0.0']
+ supported_image_tag: ['latest', '3.9.1', '4.0.0', '4.1.0']
steps:
- name: Run CVE scan
uses: aquasecurity/trivy-action@6e7b7d1fd3e4fef0c5fa8cce1229c54b2c9bd0d8 # v0.24.0
diff --git a/.github/workflows/generate-reports.yml b/.github/workflows/generate-reports.yml
index a3ae34fcecb80..dee7094c27c15 100644
--- a/.github/workflows/generate-reports.yml
+++ b/.github/workflows/generate-reports.yml
@@ -32,7 +32,7 @@ jobs:
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
- name: Checkout code
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
- name: Setup Python
uses: ./.github/actions/setup-python
- name: Run Report
diff --git a/.github/workflows/pr-labeled.yml b/.github/workflows/pr-labeled.yml
index 87b39a659ec0c..b5695825861ff 100644
--- a/.github/workflows/pr-labeled.yml
+++ b/.github/workflows/pr-labeled.yml
@@ -35,7 +35,7 @@ jobs:
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
- name: Checkout code
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
with:
persist-credentials:
false
diff --git a/.github/workflows/pr-linter.yml b/.github/workflows/pr-linter.yml
index f19efbfabe2c1..d38a9659a01f9 100644
--- a/.github/workflows/pr-linter.yml
+++ b/.github/workflows/pr-linter.yml
@@ -31,12 +31,12 @@ jobs:
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
- name: Checkout code
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
with:
persist-credentials: false
- name: Load PR Number
id: load-pr-number
- uses: actions/download-artifact@v4
+ uses: actions/download-artifact@v5
with:
github-token: ${{ github.token }}
run-id: ${{ github.event.workflow_run.id }}
diff --git a/.github/workflows/pr-update.yml b/.github/workflows/pr-update.yml
index e1cd7214d6c36..7b45a15d19126 100644
--- a/.github/workflows/pr-update.yml
+++ b/.github/workflows/pr-update.yml
@@ -37,8 +37,8 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
- uses: actions/checkout@v4
- - uses: actions/labeler@v5
+ uses: actions/checkout@v5
+ - uses: actions/labeler@v6
with:
configuration-path: .github/configs/labeler.yml
- name: check small label
diff --git a/.github/workflows/prepare_docker_official_image_source.yml b/.github/workflows/prepare_docker_official_image_source.yml
index 32f21a0afd0bf..82204b9b93597 100644
--- a/.github/workflows/prepare_docker_official_image_source.yml
+++ b/.github/workflows/prepare_docker_official_image_source.yml
@@ -31,9 +31,9 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
- name: Set up Python 3.10
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: "3.10"
- name: Install dependencies
diff --git a/.github/workflows/workflow-requested.yml b/.github/workflows/workflow-requested.yml
index 6463c72ab0acc..aae00f9ece7b0 100644
--- a/.github/workflows/workflow-requested.yml
+++ b/.github/workflows/workflow-requested.yml
@@ -38,7 +38,7 @@ jobs:
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
- name: Checkout code
- uses: actions/checkout@v4
+ uses: actions/checkout@v5
with:
persist-credentials:
false
diff --git a/LICENSE-binary b/LICENSE-binary
index 6175d3ed7d479..c6078a48bca75 100644
--- a/LICENSE-binary
+++ b/LICENSE-binary
@@ -205,53 +205,54 @@
This project bundles some components that are also licensed under the Apache
License Version 2.0:
-- caffeine-3.1.8
-- commons-beanutils-1.9.4
+- caffeine-3.2.0
+- commons-beanutils-1.11.0
- commons-collections-3.2.2
- commons-digester-2.1
-- commons-lang3-3.12.0
-- commons-logging-1.3.2
-- commons-validator-1.9.0
-- error_prone_annotations-2.21.1
-- jackson-annotations-2.16.2
-- jackson-core-2.16.2
-- jackson-databind-2.16.2
-- jackson-dataformat-csv-2.16.2
-- jackson-dataformat-yaml-2.16.2
-- jackson-datatype-jdk8-2.16.2
-- jackson-jakarta-rs-base-2.16.2
-- jackson-jakarta-rs-json-provider-2.16.2
-- jackson-module-blackbird-2.16.2
-- jackson-module-jakarta-xmlbind-annotations-2.16.2
+- commons-lang3-3.18.0
+- commons-logging-1.3.5
+- commons-validator-1.10.0
+- hash4j-0.22.0
+- jackson-annotations-2.19.0
+- jackson-core-2.19.0
+- jackson-databind-2.19.0
+- jackson-dataformat-csv-2.19.0
+- jackson-dataformat-yaml-2.19.0
+- jackson-datatype-jdk8-2.19.0
+- jackson-jakarta-rs-base-2.19.0
+- jackson-jakarta-rs-json-provider-2.19.0
+- jackson-module-blackbird-2.19.0
+- jackson-module-jakarta-xmlbind-annotations-2.19.0
- jakarta.inject-api-2.0.1
- jakarta.validation-api-3.0.2
-- javassist-3.29.2-GA
-- jetty-alpn-client-12.0.15
-- jetty-client-12.0.15
-- jetty-ee10-servlet-12.0.15
-- jetty-ee10-servlets-12.0.15
-- jetty-http-12.0.15
-- jetty-io-12.0.15
-- jetty-security-12.0.15
-- jetty-server-12.0.15
-- jetty-session-12.0.15
-- jetty-util-12.0.15
-- jose4j-0.9.4
-- log4j-api-2.24.3
-- log4j-core-2.24.3
-- log4j-slf4j-impl-2.24.3
-- log4j-1.2-api-2.24.3
+- javassist-3.30.2-GA
+- jetty-alpn-client-12.0.22
+- jetty-client-12.0.22
+- jetty-ee10-servlet-12.0.22
+- jetty-ee10-servlets-12.0.22
+- jetty-http-12.0.22
+- jetty-io-12.0.22
+- jetty-security-12.0.22
+- jetty-server-12.0.22
+- jetty-session-12.0.22
+- jetty-util-12.0.22
+- jose4j-0.9.6
+- jspecify-1.0.0
+- log4j-api-2.25.1
+- log4j-core-2.25.1
+- log4j-slf4j-impl-2.25.1
+- log4j-1.2-api-2.25.1
- lz4-java-1.8.0
- maven-artifact-3.9.6
- metrics-core-2.2.0
-- opentelemetry-proto-1.0.0-alpha
+- opentelemetry-proto-1.3.2-alpha
- plexus-utils-3.5.1
-- rocksdbjni-9.7.3
-- scala-library-2.13.15
+- rocksdbjni-10.1.3
+- scala-library-2.13.16
- scala-logging_2.13-3.9.5
-- scala-reflect-2.13.15
-- snappy-java-1.1.10.5
-- snakeyaml-2.2
+- scala-reflect-2.13.16
+- snappy-java-1.1.10.7
+- snakeyaml-2.4
- swagger-annotations-2.2.25
===============================================================================
@@ -277,12 +278,12 @@ see: licenses/eclipse-public-license-2.0
- hk2-utils-3.0.6
- osgi-resource-locator-1.0.3
- aopalliance-repackaged-3.0.6
-- jersey-client-3.1.9
-- jersey-common-3.1.9
-- jersey-container-servlet-3.1.9
-- jersey-container-servlet-core-3.1.9
-- jersey-hk2-3.1.9
-- jersey-server-3.1.9
+- jersey-client-3.1.10
+- jersey-common-3.1.10
+- jersey-container-servlet-3.1.10
+- jersey-container-servlet-core-3.1.10
+- jersey-hk2-3.1.10
+- jersey-server-3.1.10
---------------------------------------
CDDL 1.1 + GPLv2 with classpath exception
@@ -298,10 +299,10 @@ see: licenses/CDDL+GPL-1.1
MIT License
- argparse4j-0.7.0, see: licenses/argparse-MIT
-- classgraph-4.8.173, see: licenses/classgraph-MIT
+- classgraph-4.8.179, see: licenses/classgraph-MIT
- jopt-simple-5.0.4, see: licenses/jopt-simple-MIT
- slf4j-api-1.7.36, see: licenses/slf4j-MIT
-- pcollections-4.0.1, see: licenses/pcollections-MIT
+- pcollections-4.0.2, see: licenses/pcollections-MIT
---------------------------------------
BSD 2-Clause
@@ -312,11 +313,11 @@ BSD 2-Clause
---------------------------------------
BSD 3-Clause
-- jline-3.25.1, see: licenses/jline-BSD-3-clause
+- jline-3.30.4, see: licenses/jline-BSD-3-clause
- protobuf-java-3.25.5, see: licenses/protobuf-java-BSD-3-clause
- jakarta.activation-2.0.1, see: licenses/jakarta-BSD-3-clause
---------------------------------------
Go License
-- re2j-1.7, see: licenses/re2j-GO
+- re2j-1.8, see: licenses/re2j-GO
diff --git a/README.md b/README.md
index 4c2fc4e1f160b..06c0e3921ebc1 100644
--- a/README.md
+++ b/README.md
@@ -13,7 +13,7 @@
You need to have [Java](http://www.oracle.com/technetwork/java/javase/downloads/index.html) installed.
-We build and test Apache Kafka with 17 and 23. The `release` parameter in javac is set to `11` for the clients
+We build and test Apache Kafka with 17 and 24. The `release` parameter in javac is set to `11` for the clients
and streams modules, and `17` for the rest, ensuring compatibility with their respective
minimum Java versions. Similarly, the `release` parameter in scalac is set to `11` for the streams modules and `17`
for the rest.
@@ -52,6 +52,7 @@ Follow instructions in https://kafka.apache.org/quickstart
### Running a particular unit/integration test ###
./gradlew clients:test --tests RequestResponseTest
+ ./gradlew streams:integration-tests:test --tests RestoreIntegrationTest
### Repeatedly running a particular unit/integration test with specific times by setting N ###
N=500; I=0; while [ $I -lt $N ] && ./gradlew clients:test --tests RequestResponseTest --rerun --fail-fast; do (( I=$I+1 )); echo "Completed run: $I"; sleep 1; done
@@ -59,6 +60,7 @@ Follow instructions in https://kafka.apache.org/quickstart
### Running a particular test method within a unit/integration test ###
./gradlew core:test --tests kafka.api.ProducerFailureHandlingTest.testCannotSendToInternalTopic
./gradlew clients:test --tests org.apache.kafka.clients.MetadataTest.testTimeToNextUpdate
+ ./gradlew streams:integration-tests:test --tests org.apache.kafka.streams.integration.RestoreIntegrationTest.shouldRestoreNullRecord
### Running a particular unit/integration test with log4j output ###
By default, there will be only small number of logs output while testing. You can adjust it by changing the `log4j2.yaml` file in the module's `src/test/resources` directory.
diff --git a/bin/kafka-run-class.sh b/bin/kafka-run-class.sh
index 3463389d3c005..0a5ecfae04e20 100755
--- a/bin/kafka-run-class.sh
+++ b/bin/kafka-run-class.sh
@@ -49,7 +49,7 @@ should_include_file() {
base_dir=$(dirname $0)/..
if [ -z "$SCALA_VERSION" ]; then
- SCALA_VERSION=2.13.15
+ SCALA_VERSION=2.13.16
if [[ -f "$base_dir/gradle.properties" ]]; then
SCALA_VERSION=`grep "^scalaVersion=" "$base_dir/gradle.properties" | cut -d= -f 2`
fi
@@ -282,7 +282,7 @@ fi
# JVM performance options
# MaxInlineLevel=15 is the default since JDK 14 and can be removed once older JDKs are no longer supported
if [ -z "$KAFKA_JVM_PERFORMANCE_OPTS" ]; then
- KAFKA_JVM_PERFORMANCE_OPTS="-server -XX:+UseG1GC -XX:MaxGCPauseMillis=20 -XX:InitiatingHeapOccupancyPercent=35 -XX:+ExplicitGCInvokesConcurrent -XX:MaxInlineLevel=15 -Djava.awt.headless=true"
+ KAFKA_JVM_PERFORMANCE_OPTS="-server -XX:+UseG1GC -XX:MaxGCPauseMillis=20 -XX:InitiatingHeapOccupancyPercent=35 -XX:+ExplicitGCInvokesConcurrent -XX:MaxInlineLevel=15"
fi
while [ $# -gt 0 ]; do
diff --git a/bin/windows/kafka-run-class.bat b/bin/windows/kafka-run-class.bat
index ca151e5df96ed..a73ae2b26f2d7 100755
--- a/bin/windows/kafka-run-class.bat
+++ b/bin/windows/kafka-run-class.bat
@@ -27,7 +27,7 @@ set BASE_DIR=%CD%
popd
IF ["%SCALA_VERSION%"] EQU [""] (
- set SCALA_VERSION=2.13.15
+ set SCALA_VERSION=2.13.16
)
IF ["%SCALA_BINARY_VERSION%"] EQU [""] (
@@ -177,7 +177,7 @@ IF ["%KAFKA_HEAP_OPTS%"] EQU [""] (
rem JVM performance options
IF ["%KAFKA_JVM_PERFORMANCE_OPTS%"] EQU [""] (
- set KAFKA_JVM_PERFORMANCE_OPTS=-server -XX:+UseG1GC -XX:MaxGCPauseMillis=20 -XX:InitiatingHeapOccupancyPercent=35 -XX:+ExplicitGCInvokesConcurrent -Djava.awt.headless=true
+ set KAFKA_JVM_PERFORMANCE_OPTS=-server -XX:+UseG1GC -XX:MaxGCPauseMillis=20 -XX:InitiatingHeapOccupancyPercent=35 -XX:+ExplicitGCInvokesConcurrent
)
IF not defined CLASSPATH (
diff --git a/build.gradle b/build.gradle
index 2e35057165c53..8d18d0ea7b849 100644
--- a/build.gradle
+++ b/build.gradle
@@ -14,8 +14,6 @@
// limitations under the License.
import org.ajoberstar.grgit.Grgit
-import org.gradle.api.JavaVersion
-
import java.nio.charset.StandardCharsets
buildscript {
@@ -39,7 +37,7 @@ plugins {
id 'org.nosphere.apache.rat' version "0.8.1"
id "io.swagger.core.v3.swagger-gradle-plugin" version "${swaggerVersion}"
- id "com.github.spotbugs" version '6.0.25' apply false
+ id "com.github.spotbugs" version '6.2.3' apply false
id 'org.scoverage' version '8.0.3' apply false
id 'com.gradleup.shadow' version '8.3.6' apply false
id 'com.diffplug.spotless' version "6.25.0"
@@ -49,7 +47,7 @@ ext {
gradleVersion = versions.gradle
minClientJavaVersion = 11
minNonClientJavaVersion = 17
- modulesNeedingJava11 = [":clients", ":generator", ":streams", ":streams:test-utils", ":streams-scala", ":test-common:test-common-util"]
+ modulesNeedingJava11 = [":clients", ":generator", ":streams", ":streams:test-utils", ":streams:examples", ":streams-scala", ":test-common:test-common-util"]
buildVersionFileName = "kafka-version.properties"
@@ -159,7 +157,7 @@ ext {
libs.log4j2Api,
libs.log4j2Core
]
-
+
}
allprojects {
@@ -195,7 +193,8 @@ allprojects {
// ensure we have a single version in the classpath despite transitive dependencies
libs.scalaLibrary,
libs.scalaReflect,
- libs.jacksonAnnotations
+ libs.jacksonAnnotations,
+ libs.commonsLang
)
}
}
@@ -369,7 +368,6 @@ subprojects {
// Fix for avoiding inclusion of runtime dependencies marked as 'shadow' in MANIFEST Class-Path.
// https://github.com/GradleUp/shadow/issues/324
- afterEvaluate {
pom.withXml { xml ->
def dependenciesNode = xml.asNode().get('dependencies') ?: xml.asNode().appendNode('dependencies')
project.configurations.shadowed.allDependencies.each {
@@ -380,7 +378,6 @@ subprojects {
dependencyNode.appendNode('scope', 'runtime')
}
}
- }
}
afterEvaluate {
@@ -485,11 +482,36 @@ subprojects {
}
}
+ // Workaround for Mockito Java Agent restrictions in Java 21+
+ // Starting with Java 21, the JDK restricts libraries from attaching a Java agent
+ // to their own JVM. As a result, Mockito’s inline mock maker (mockito-core)
+ // fails without explicit instrumentation, and the JVM consistently emits warnings.
+ // See also: https://javadoc.io/doc/org.mockito/mockito-core/latest/org.mockito/org/mockito/Mockito.html#mockito-instrumentation
+ afterEvaluate { subproject ->
+ def hasMockitoCore = subproject.configurations.findAll {
+ it.canBeResolved
+ }.any { config ->
+ config.incoming.dependencies.any { dependency ->
+ "$dependency" == libs.mockitoCore
+ }
+ }
+
+ if (hasMockitoCore) {
+ subproject.configurations {
+ mockitoAgent {
+ transitive = false
+ }
+ }
+ subproject.dependencies {
+ mockitoAgent libs.mockitoCore
+ }
+ }
+ }
+
// The suites are for running sets of tests in IDEs.
// Gradle will run each test class, so we exclude the suites to avoid redundantly running the tests twice.
def testsToExclude = ['**/*Suite.class']
-
// This task will copy JUnit XML files out of the sub-project's build directory and into
// a top-level build/junit-xml directory. This is necessary to avoid reporting on tests which
// were not run, but instead were restored via FROM-CACHE. See KAFKA-17479 for more details.
@@ -519,6 +541,14 @@ subprojects {
}
test {
+
+ doFirst {
+ def mockitoAgentConfig = configurations.findByName('mockitoAgent')
+ if (mockitoAgentConfig) {
+ jvmArgs("-javaagent:${mockitoAgentConfig.asPath}")
+ }
+ }
+
maxParallelForks = maxTestForks
ignoreFailures = userIgnoreFailures
@@ -552,7 +582,7 @@ subprojects {
maxFailures = userMaxTestRetryFailures
}
}
-
+
finalizedBy("copyTestXml")
}
@@ -894,6 +924,9 @@ project(':server') {
}
dependencies {
+ compileOnly libs.bndlib
+ compileOnly libs.spotbugs
+
implementation project(':clients')
implementation project(':metadata')
implementation project(':server-common')
@@ -906,6 +939,7 @@ project(':server') {
implementation libs.jacksonDatabind
implementation libs.metrics
implementation libs.slf4jApi
+ implementation log4j2Libs
testImplementation project(':clients').sourceSets.test.output
@@ -915,6 +949,7 @@ project(':server') {
testImplementation project(':test-common:test-common-internal-api')
testImplementation project(':test-common:test-common-runtime')
testImplementation project(':storage:storage-api').sourceSets.test.output
+ testImplementation project(':server-common').sourceSets.test.output
testRuntimeOnly runtimeTestLibs
}
@@ -1024,6 +1059,7 @@ project(':core') {
implementation libs.scalaReflect
implementation libs.scalaLogging
implementation libs.slf4jApi
+ implementation libs.re2j
testImplementation project(':clients').sourceSets.test.output
testImplementation project(':group-coordinator').sourceSets.test.output
@@ -1056,6 +1092,7 @@ project(':core') {
testImplementation libs.junitJupiter
testImplementation libs.caffeine
testImplementation testLog4j2Libs
+ testImplementation libs.mockOAuth2Server
testRuntimeOnly runtimeTestLibs
}
@@ -1143,6 +1180,13 @@ project(':core') {
standardOutput = new File(generatedDocsDir, "topic_config.html").newOutputStream()
}
+ task genGroupConfigDocs(type: JavaExec) {
+ classpath = sourceSets.main.runtimeClasspath
+ mainClass = 'org.apache.kafka.coordinator.group.GroupConfig'
+ if( !generatedDocsDir.exists() ) { generatedDocsDir.mkdirs() }
+ standardOutput = new File(generatedDocsDir, "group_config.html").newOutputStream()
+ }
+
task genConsumerMetricsDocs(type: JavaExec) {
classpath = sourceSets.test.runtimeClasspath
mainClass = 'org.apache.kafka.clients.consumer.internals.ConsumerMetrics'
@@ -1159,7 +1203,7 @@ project(':core') {
task siteDocsTar(dependsOn: ['genProtocolErrorDocs', 'genProtocolTypesDocs', 'genProtocolApiKeyDocs', 'genProtocolMessageDocs',
'genAdminClientConfigDocs', 'genProducerConfigDocs', 'genConsumerConfigDocs',
- 'genKafkaConfigDocs', 'genTopicConfigDocs',
+ 'genKafkaConfigDocs', 'genTopicConfigDocs', 'genGroupConfigDocs',
':connect:runtime:genConnectConfigDocs', ':connect:runtime:genConnectTransformationDocs',
':connect:runtime:genConnectPredicateDocs',
':connect:runtime:genSinkConnectorConfigDocs', ':connect:runtime:genSourceConnectorConfigDocs',
@@ -1420,6 +1464,7 @@ project(':group-coordinator') {
implementation libs.hdrHistogram
implementation libs.re2j
implementation libs.slf4jApi
+ implementation libs.hash4j
testImplementation project(':clients').sourceSets.test.output
testImplementation project(':server-common').sourceSets.test.output
@@ -1762,11 +1807,11 @@ project(':generator') {
implementation libs.jacksonJDK8Datatypes
implementation libs.jacksonJakartarsJsonProvider
- implementation 'org.eclipse.jgit:org.eclipse.jgit:6.4.0.202211300538-r'
+ implementation 'org.eclipse.jgit:org.eclipse.jgit:7.2.0.202503040940-r'
// SSH support for JGit based on Apache MINA sshd
- implementation 'org.eclipse.jgit:org.eclipse.jgit.ssh.apache:6.4.0.202211300538-r'
+ implementation 'org.eclipse.jgit:org.eclipse.jgit.ssh.apache:7.2.0.202503040940-r'
// GPG support for JGit based on BouncyCastle (commit signing)
- implementation 'org.eclipse.jgit:org.eclipse.jgit.gpg.bc:6.4.0.202211300538-r'
+ implementation 'org.eclipse.jgit:org.eclipse.jgit.gpg.bc:7.2.0.202503040940-r'
testImplementation libs.junitJupiter
@@ -1973,6 +2018,7 @@ project(':clients:clients-integration-tests') {
}
dependencies {
+ testImplementation libs.metrics
testImplementation libs.slf4jApi
testImplementation project(':test-common:test-common-internal-api')
testImplementation project(':test-common:test-common-runtime')
@@ -1982,9 +2028,12 @@ project(':clients:clients-integration-tests') {
testImplementation project(':core').sourceSets.test.output
testImplementation project(':clients').sourceSets.test.output
implementation project(':server-common')
+ testImplementation project(':server-common').sourceSets.test.output
testImplementation project(':metadata')
implementation project(':group-coordinator')
+ implementation project(':group-coordinator:group-coordinator-api')
implementation project(':transaction-coordinator')
+ testImplementation project(':test-common:test-common-util')
testImplementation libs.junitJupiter
testImplementation libs.junitPlatformSuiteEngine
@@ -2230,11 +2279,13 @@ project(':storage') {
}
dependencies {
+ implementation project(':metadata')
implementation project(':storage:storage-api')
implementation project(':server-common')
implementation project(':clients')
implementation(libs.caffeine) {
exclude group: 'org.checkerframework', module: 'checker-qual'
+ exclude group: 'com.google.errorprone', module: 'error_prone_annotations'
}
implementation libs.slf4jApi
implementation libs.jacksonDatabind
@@ -2897,10 +2948,10 @@ project(':streams:examples') {
}
dependencies {
- // this dependency should be removed after we unify data API
- implementation(project(':connect:json'))
implementation project(':streams')
implementation libs.slf4jApi
+ implementation libs.jacksonDatabind
+ implementation libs.jacksonAnnotations
testImplementation project(':streams:test-utils')
testImplementation project(':clients').sourceSets.test.output // for org.apache.kafka.test.IntegrationTest
@@ -2936,6 +2987,7 @@ project(':streams:upgrade-system-tests-0110') {
dependencies {
testImplementation libs.kafkaStreams_0110
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -2951,6 +3003,7 @@ project(':streams:upgrade-system-tests-10') {
dependencies {
testImplementation libs.kafkaStreams_10
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -2966,6 +3019,7 @@ project(':streams:upgrade-system-tests-11') {
dependencies {
testImplementation libs.kafkaStreams_11
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -2981,6 +3035,7 @@ project(':streams:upgrade-system-tests-20') {
dependencies {
testImplementation libs.kafkaStreams_20
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -2996,6 +3051,7 @@ project(':streams:upgrade-system-tests-21') {
dependencies {
testImplementation libs.kafkaStreams_21
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -3011,6 +3067,7 @@ project(':streams:upgrade-system-tests-22') {
dependencies {
testImplementation libs.kafkaStreams_22
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -3026,6 +3083,7 @@ project(':streams:upgrade-system-tests-23') {
dependencies {
testImplementation libs.kafkaStreams_23
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -3041,6 +3099,7 @@ project(':streams:upgrade-system-tests-24') {
dependencies {
testImplementation libs.kafkaStreams_24
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -3056,6 +3115,7 @@ project(':streams:upgrade-system-tests-25') {
dependencies {
testImplementation libs.kafkaStreams_25
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -3071,6 +3131,7 @@ project(':streams:upgrade-system-tests-26') {
dependencies {
testImplementation libs.kafkaStreams_26
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -3086,6 +3147,7 @@ project(':streams:upgrade-system-tests-27') {
dependencies {
testImplementation libs.kafkaStreams_27
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -3101,6 +3163,7 @@ project(':streams:upgrade-system-tests-28') {
dependencies {
testImplementation libs.kafkaStreams_28
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -3116,6 +3179,7 @@ project(':streams:upgrade-system-tests-30') {
dependencies {
testImplementation libs.kafkaStreams_30
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -3131,6 +3195,7 @@ project(':streams:upgrade-system-tests-31') {
dependencies {
testImplementation libs.kafkaStreams_31
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -3146,6 +3211,7 @@ project(':streams:upgrade-system-tests-32') {
dependencies {
testImplementation libs.kafkaStreams_32
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -3161,6 +3227,7 @@ project(':streams:upgrade-system-tests-33') {
dependencies {
testImplementation libs.kafkaStreams_33
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -3176,6 +3243,7 @@ project(':streams:upgrade-system-tests-34') {
dependencies {
testImplementation libs.kafkaStreams_34
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -3191,6 +3259,7 @@ project(':streams:upgrade-system-tests-35') {
dependencies {
testImplementation libs.kafkaStreams_35
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -3206,6 +3275,7 @@ project(':streams:upgrade-system-tests-36') {
dependencies {
testImplementation libs.kafkaStreams_36
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -3221,6 +3291,7 @@ project(':streams:upgrade-system-tests-37') {
dependencies {
testImplementation libs.kafkaStreams_37
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -3236,6 +3307,7 @@ project(':streams:upgrade-system-tests-38') {
dependencies {
testImplementation libs.kafkaStreams_38
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -3251,6 +3323,7 @@ project(':streams:upgrade-system-tests-39') {
dependencies {
testImplementation libs.kafkaStreams_39
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -3266,6 +3339,23 @@ project(':streams:upgrade-system-tests-40') {
dependencies {
testImplementation libs.kafkaStreams_40
testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
+ }
+
+ systemTestLibs {
+ dependsOn testJar
+ }
+}
+
+project(':streams:upgrade-system-tests-41') {
+ base {
+ archivesName = "kafka-streams-upgrade-system-tests-41"
+ }
+
+ dependencies {
+ testImplementation libs.kafkaStreams_41
+ testRuntimeOnly libs.junitJupiter
+ testRuntimeOnly runtimeTestLibs
}
systemTestLibs {
@@ -3291,6 +3381,7 @@ project(':jmh-benchmarks') {
implementation project(':raft')
implementation project(':clients')
implementation project(':coordinator-common')
+ implementation project(':coordinator-common').sourceSets.test.output
implementation project(':group-coordinator')
implementation project(':group-coordinator:group-coordinator-api')
implementation project(':metadata')
@@ -3339,7 +3430,7 @@ project(':jmh-benchmarks') {
if (System.getProperty("jmhArgs")) {
args System.getProperty("jmhArgs").split(' ')
}
- args = [shadowJar.archivePath, *args]
+ args = [shadowJar.archiveFile.get().asFile, *args]
}
}
diff --git a/checkstyle/import-control-clients-integration-tests.xml b/checkstyle/import-control-clients-integration-tests.xml
index 44cf0dba1fbec..3c6f50890d2b8 100644
--- a/checkstyle/import-control-clients-integration-tests.xml
+++ b/checkstyle/import-control-clients-integration-tests.xml
@@ -21,12 +21,14 @@
-
+
+
+
diff --git a/checkstyle/import-control-coordinator-common.xml b/checkstyle/import-control-coordinator-common.xml
index bafffe8069746..7841697cf892a 100644
--- a/checkstyle/import-control-coordinator-common.xml
+++ b/checkstyle/import-control-coordinator-common.xml
@@ -58,9 +58,11 @@
+
+
diff --git a/checkstyle/import-control-group-coordinator.xml b/checkstyle/import-control-group-coordinator.xml
index 8b6a8d99f5eaa..1f0e91de144bc 100644
--- a/checkstyle/import-control-group-coordinator.xml
+++ b/checkstyle/import-control-group-coordinator.xml
@@ -51,6 +51,7 @@
+
@@ -76,6 +77,7 @@
+
diff --git a/checkstyle/import-control-jmh-benchmarks.xml b/checkstyle/import-control-jmh-benchmarks.xml
index 4469ccf3bbeb9..4c11bc3acb42e 100644
--- a/checkstyle/import-control-jmh-benchmarks.xml
+++ b/checkstyle/import-control-jmh-benchmarks.xml
@@ -42,7 +42,6 @@
-
@@ -53,7 +52,7 @@
-
+
diff --git a/checkstyle/import-control-metadata.xml b/checkstyle/import-control-metadata.xml
index c2660674e3259..773635cec8e9c 100644
--- a/checkstyle/import-control-metadata.xml
+++ b/checkstyle/import-control-metadata.xml
@@ -83,7 +83,6 @@
-
@@ -108,6 +107,7 @@
+
@@ -160,9 +160,9 @@
-
+
@@ -198,18 +198,4 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/checkstyle/import-control-server-common.xml b/checkstyle/import-control-server-common.xml
index 8d85dffa341f0..95a014b87e4ec 100644
--- a/checkstyle/import-control-server-common.xml
+++ b/checkstyle/import-control-server-common.xml
@@ -33,6 +33,7 @@
+
@@ -49,6 +50,9 @@
+
+
+
@@ -60,6 +64,7 @@
+
diff --git a/checkstyle/import-control-server.xml b/checkstyle/import-control-server.xml
index 30a7f5fbe761f..b3d1b928cc6db 100644
--- a/checkstyle/import-control-server.xml
+++ b/checkstyle/import-control-server.xml
@@ -86,10 +86,15 @@
+
+
+
+
+
@@ -100,6 +105,10 @@
+
+
+
+
diff --git a/checkstyle/import-control-storage.xml b/checkstyle/import-control-storage.xml
index 639cb6dc1d04c..2a0f74126859a 100644
--- a/checkstyle/import-control-storage.xml
+++ b/checkstyle/import-control-storage.xml
@@ -94,6 +94,8 @@
+
+
@@ -156,4 +158,13 @@
+
+
+
+
+
+
+
+
+
diff --git a/checkstyle/import-control-transaction-coordinator.xml b/checkstyle/import-control-transaction-coordinator.xml
index bf2157750c3a6..810c127c95c32 100644
--- a/checkstyle/import-control-transaction-coordinator.xml
+++ b/checkstyle/import-control-transaction-coordinator.xml
@@ -38,7 +38,9 @@
+
+
diff --git a/checkstyle/import-control.xml b/checkstyle/import-control.xml
index ead4111389bb3..b1ef62ca3a26b 100644
--- a/checkstyle/import-control.xml
+++ b/checkstyle/import-control.xml
@@ -145,6 +145,7 @@
+
@@ -199,6 +200,8 @@
+
+
@@ -232,6 +235,7 @@
+
@@ -428,6 +432,7 @@
+
diff --git a/checkstyle/suppressions.xml b/checkstyle/suppressions.xml
index 4e0f338af5d46..dc9ac09a9b25c 100644
--- a/checkstyle/suppressions.xml
+++ b/checkstyle/suppressions.xml
@@ -41,7 +41,7 @@
files="(KafkaClusterTestKit).java"/>
+ files="(SharePartitionManagerTest|SharePartitionTest|ShareConsumerTest).java"/>
@@ -49,6 +49,7 @@
+
+ files="(AbstractRequest|AbstractResponse|KerberosLogin|WorkerSinkTaskTest|TransactionManagerTest|SenderTest|KafkaAdminClient|ConsumerCoordinatorTest|KafkaAdminClientTest|KafkaProducerTest).java"/>
diff --git a/clients/clients-integration-tests/src/test/java/org/apache/kafka/clients/ClientRebootstrapTest.java b/clients/clients-integration-tests/src/test/java/org/apache/kafka/clients/ClientRebootstrapTest.java
index 2f11e13377d07..b388744b711ab 100644
--- a/clients/clients-integration-tests/src/test/java/org/apache/kafka/clients/ClientRebootstrapTest.java
+++ b/clients/clients-integration-tests/src/test/java/org/apache/kafka/clients/ClientRebootstrapTest.java
@@ -24,11 +24,11 @@
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.config.TopicConfig;
import org.apache.kafka.common.test.ClusterInstance;
-import org.apache.kafka.common.test.TestUtils;
import org.apache.kafka.common.test.api.ClusterConfigProperty;
import org.apache.kafka.common.test.api.ClusterTest;
import org.apache.kafka.common.test.api.Type;
import org.apache.kafka.coordinator.group.GroupCoordinatorConfig;
+import org.apache.kafka.test.TestUtils;
import java.time.Duration;
import java.util.List;
diff --git a/clients/clients-integration-tests/src/test/java/org/apache/kafka/clients/ClientsTestUtils.java b/clients/clients-integration-tests/src/test/java/org/apache/kafka/clients/ClientsTestUtils.java
new file mode 100644
index 0000000000000..a007035e860a8
--- /dev/null
+++ b/clients/clients-integration-tests/src/test/java/org/apache/kafka/clients/ClientsTestUtils.java
@@ -0,0 +1,539 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kafka.clients;
+
+import org.apache.kafka.clients.consumer.Consumer;
+import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.OffsetAndMetadata;
+import org.apache.kafka.clients.consumer.OffsetCommitCallback;
+import org.apache.kafka.clients.consumer.RetriableCommitFailedException;
+import org.apache.kafka.clients.producer.Producer;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.common.ClusterResource;
+import org.apache.kafka.common.ClusterResourceListener;
+import org.apache.kafka.common.PartitionInfo;
+import org.apache.kafka.common.TopicPartition;
+import org.apache.kafka.common.internals.Topic;
+import org.apache.kafka.common.record.TimestampType;
+import org.apache.kafka.common.serialization.Deserializer;
+import org.apache.kafka.common.serialization.Serializer;
+import org.apache.kafka.common.test.ClusterInstance;
+import org.apache.kafka.test.TestUtils;
+
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Supplier;
+
+import static org.apache.kafka.clients.ClientsTestUtils.TestClusterResourceListenerDeserializer.UPDATE_CONSUMER_COUNT;
+import static org.apache.kafka.clients.ClientsTestUtils.TestClusterResourceListenerSerializer.UPDATE_PRODUCER_COUNT;
+import static org.apache.kafka.clients.consumer.ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG;
+import static org.apache.kafka.clients.consumer.ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG;
+import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG;
+import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+public class ClientsTestUtils {
+
+ private static final String KEY_PREFIX = "key ";
+ private static final String VALUE_PREFIX = "value ";
+
+ private ClientsTestUtils() {}
+
+ public static List> consumeRecords(
+ Consumer consumer,
+ int numRecords
+ ) throws InterruptedException {
+ return consumeRecords(consumer, numRecords, Integer.MAX_VALUE);
+ }
+
+ public static List> consumeRecords(
+ Consumer consumer,
+ int numRecords,
+ int maxPollRecords
+ ) throws InterruptedException {
+ List> consumedRecords = new ArrayList<>();
+ TestUtils.waitForCondition(() -> {
+ var records = consumer.poll(Duration.ofMillis(100));
+ records.forEach(consumedRecords::add);
+ assertTrue(records.count() <= maxPollRecords);
+ return consumedRecords.size() >= numRecords;
+ }, 60000, "Timed out before consuming expected " + numRecords + " records.");
+
+ return consumedRecords;
+ }
+
+ public static void consumeAndVerifyRecords(
+ Consumer consumer,
+ TopicPartition tp,
+ int numRecords,
+ int startingOffset,
+ int startingKeyAndValueIndex,
+ long startingTimestamp,
+ long timestampIncrement
+ ) throws InterruptedException {
+ consumeAndVerifyRecords(
+ consumer,
+ tp,
+ numRecords,
+ Integer.MAX_VALUE,
+ startingOffset,
+ startingKeyAndValueIndex,
+ startingTimestamp,
+ timestampIncrement
+ );
+ }
+
+ public static void pollUntilTrue(
+ Consumer consumer,
+ Supplier testCondition,
+ String msg
+ ) throws InterruptedException {
+ pollUntilTrue(consumer, Duration.ofMillis(100), testCondition, 15_000L, msg);
+ }
+
+ public static void pollUntilTrue(
+ Consumer consumer,
+ Supplier testCondition,
+ long waitTimeMs,
+ String msg
+ ) throws InterruptedException {
+ pollUntilTrue(consumer, Duration.ofMillis(100), testCondition, waitTimeMs, msg);
+ }
+
+ public static void pollUntilTrue(
+ Consumer consumer,
+ Duration timeout,
+ Supplier testCondition,
+ long waitTimeMs,
+ String msg
+ ) throws InterruptedException {
+ TestUtils.waitForCondition(() -> {
+ consumer.poll(timeout);
+ return testCondition.get();
+ }, waitTimeMs, msg);
+ }
+
+ public static void consumeAndVerifyRecordsWithTimeTypeLogAppend(
+ Consumer consumer,
+ TopicPartition tp,
+ int numRecords,
+ long startingTimestamp
+ ) throws InterruptedException {
+ var records = consumeRecords(consumer, numRecords, Integer.MAX_VALUE);
+ var now = System.currentTimeMillis();
+ for (var i = 0; i < numRecords; i++) {
+ var record = records.get(i);
+ assertEquals(tp.topic(), record.topic());
+ assertEquals(tp.partition(), record.partition());
+
+ assertTrue(record.timestamp() >= startingTimestamp && record.timestamp() <= now,
+ "Got unexpected timestamp " + record.timestamp() + ". Timestamp should be between [" + startingTimestamp + ", " + now + "]");
+
+ assertEquals(i, record.offset());
+ assertEquals(KEY_PREFIX + i, new String(record.key()));
+ assertEquals(VALUE_PREFIX + i, new String(record.value()));
+ // this is true only because K and V are byte arrays
+ assertEquals((KEY_PREFIX + i).length(), record.serializedKeySize());
+ assertEquals((VALUE_PREFIX + i).length(), record.serializedValueSize());
+ }
+ }
+
+ public static void consumeAndVerifyRecords(
+ Consumer consumer,
+ TopicPartition tp,
+ int numRecords,
+ int maxPollRecords,
+ int startingOffset,
+ int startingKeyAndValueIndex,
+ long startingTimestamp,
+ long timestampIncrement
+ ) throws InterruptedException {
+ var records = consumeRecords(consumer, numRecords, maxPollRecords);
+ for (var i = 0; i < numRecords; i++) {
+ var record = records.get(i);
+ var offset = startingOffset + i;
+
+ assertEquals(tp.topic(), record.topic());
+ assertEquals(tp.partition(), record.partition());
+
+ assertEquals(TimestampType.CREATE_TIME, record.timestampType());
+ var timestamp = startingTimestamp + i * (timestampIncrement > 0 ? timestampIncrement : 1);
+ assertEquals(timestamp, record.timestamp());
+
+ assertEquals(offset, record.offset());
+ var keyAndValueIndex = startingKeyAndValueIndex + i;
+ assertEquals(KEY_PREFIX + keyAndValueIndex, new String(record.key()));
+ assertEquals(VALUE_PREFIX + keyAndValueIndex, new String(record.value()));
+ // this is true only because K and V are byte arrays
+ assertEquals((KEY_PREFIX + keyAndValueIndex).length(), record.serializedKeySize());
+ assertEquals((VALUE_PREFIX + keyAndValueIndex).length(), record.serializedValueSize());
+ }
+ }
+
+ public static void consumeAndVerifyRecords(
+ Consumer consumer,
+ TopicPartition tp,
+ int numRecords,
+ int startingOffset,
+ int startingKeyAndValueIndex,
+ long startingTimestamp
+ ) throws InterruptedException {
+ consumeAndVerifyRecords(consumer, tp, numRecords, startingOffset, startingKeyAndValueIndex, startingTimestamp, -1);
+ }
+
+ public static void consumeAndVerifyRecords(
+ Consumer consumer,
+ TopicPartition tp,
+ int numRecords,
+ int startingOffset
+ ) throws InterruptedException {
+ consumeAndVerifyRecords(consumer, tp, numRecords, startingOffset, 0, 0, -1);
+ }
+
+ public static void sendRecords(
+ ClusterInstance cluster,
+ TopicPartition tp,
+ int numRecords,
+ long startingTimestamp,
+ long timestampIncrement
+ ) {
+ try (Producer producer = cluster.producer()) {
+ for (var i = 0; i < numRecords; i++) {
+ sendRecord(producer, tp, startingTimestamp, i, timestampIncrement);
+ }
+ producer.flush();
+ }
+ }
+
+ public static void sendRecords(
+ ClusterInstance cluster,
+ TopicPartition tp,
+ int numRecords,
+ long startingTimestamp
+ ) {
+ sendRecords(cluster, tp, numRecords, startingTimestamp, -1);
+ }
+
+ public static void sendRecords(
+ ClusterInstance cluster,
+ TopicPartition tp,
+ int numRecords
+ ) {
+ sendRecords(cluster, tp, numRecords, System.currentTimeMillis());
+ }
+
+ public static List> sendRecords(
+ Producer producer,
+ TopicPartition tp,
+ int numRecords,
+ long startingTimestamp,
+ long timestampIncrement
+ ) {
+ List> records = new ArrayList<>();
+ for (var i = 0; i < numRecords; i++) {
+ var record = sendRecord(producer, tp, startingTimestamp, i, timestampIncrement);
+ records.add(record);
+ }
+ producer.flush();
+ return records;
+ }
+
+ public static void sendRecords(
+ Producer producer,
+ TopicPartition tp,
+ int numRecords,
+ long startingTimestamp
+ ) {
+ for (var i = 0; i < numRecords; i++) {
+ sendRecord(producer, tp, startingTimestamp, i, -1);
+ }
+ producer.flush();
+ }
+
+ public static void awaitAssignment(
+ Consumer consumer,
+ Set expectedAssignment
+ ) throws InterruptedException {
+ TestUtils.waitForCondition(() -> {
+ consumer.poll(Duration.ofMillis(100));
+ return consumer.assignment().equals(expectedAssignment);
+ }, "Timed out while awaiting expected assignment " + expectedAssignment + ". " +
+ "The current assignment is " + consumer.assignment()
+ );
+ }
+
+ private static ProducerRecord sendRecord(
+ Producer producer,
+ TopicPartition tp,
+ long startingTimestamp,
+ int numRecord,
+ long timestampIncrement
+ ) {
+ var timestamp = startingTimestamp + numRecord * (timestampIncrement > 0 ? timestampIncrement : 1);
+ var record = new ProducerRecord<>(
+ tp.topic(),
+ tp.partition(),
+ timestamp,
+ (KEY_PREFIX + numRecord).getBytes(),
+ (VALUE_PREFIX + numRecord).getBytes()
+ );
+ producer.send(record);
+ return record;
+ }
+
+ public static void sendAndAwaitAsyncCommit(
+ Consumer consumer,
+ Optional