diff --git a/.circleci/collect_reports.sh b/.circleci/collect_reports.sh index 9e085003c62..ce66a61358e 100755 --- a/.circleci/collect_reports.sh +++ b/.circleci/collect_reports.sh @@ -35,12 +35,12 @@ done mkdir -p $REPORTS_DIR >/dev/null 2>&1 -cp /tmp/hs_err_pid*.log $REPORTS_DIR || true -cp /tmp/java_pid*.hprof $REPORTS_DIR || true -cp /tmp/javacore.* $REPORTS_DIR || true -cp /tmp/*.trc $REPORTS_DIR || true -cp /tmp/*.dmp $REPORTS_DIR || true -cp /tmp/dd-profiler/*.jfr $REPORTS_DIR || true +cp /tmp/hs_err_pid*.log $REPORTS_DIR 2>/dev/null || true +cp /tmp/java_pid*.hprof $REPORTS_DIR 2>/dev/null || true +cp /tmp/javacore.* $REPORTS_DIR 2>/dev/null || true +cp /tmp/*.trc $REPORTS_DIR 2>/dev/null || true +cp /tmp/*.dmp $REPORTS_DIR 2>/dev/null || true +cp /tmp/dd-profiler/*.jfr $REPORTS_DIR 2>/dev/null || true function process_reports () { project_to_save=$1 @@ -59,9 +59,9 @@ function process_reports () { else echo "copying reports for $project_to_save" mkdir -p $report_path - cp -r workspace/$project_to_save/build/reports/* $report_path/ || true - cp workspace/$project_to_save/build/hs_err_pid*.log $report_path/ || true - cp workspace/$project_to_save/build/javacore*.txt $report_path/ || true + cp -r workspace/$project_to_save/build/reports/* $report_path/ 2>/dev/null || true + cp workspace/$project_to_save/build/hs_err_pid*.log $report_path/ 2>/dev/null || true + cp workspace/$project_to_save/build/javacore*.txt $report_path/ 2>/dev/null || true fi } @@ -73,4 +73,4 @@ for report_path in workspace/**/build/reports; do process_reports $report_path done -tar -cvzf reports.tar $REPORTS_DIR +tar -czf reports.tar $REPORTS_DIR diff --git a/.circleci/collect_results.sh b/.circleci/collect_results.sh index 214895e3257..4b3ab764011 100755 --- a/.circleci/collect_results.sh +++ b/.circleci/collect_results.sh @@ -35,7 +35,7 @@ function get_source_file () { fi done done < <(grep -rl "class $class" "$file_path") - file_path="$common_root" + file_path="/$common_root" fi } diff 
--git a/.circleci/config.yml b/.circleci/config.yml index ca098272cd4..e5a2864178f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,40 +1,16 @@ version: 2.1 -setup: true -python310_image: &python310_image cimg/python:3.10 -parameters: - nightly: - type: boolean - default: false - weekly: - type: boolean - default: false -orbs: - continuation: circleci/continuation@0.1.2 -executors: - python310: - docker: - - image: *python310_image - resource_class: small + jobs: - setup: - executor: python310 + donothing: + resource_class: small + docker: + - image: alpine steps: - - checkout - - run: - name: Checkout merge commit - command: .circleci/checkout_merge_commit.sh - - run: - name: Install dependencies - command: pip3 install jinja2 requests - run: - name: Generate config - command: >- - CIRCLE_IS_NIGHTLY="<< pipeline.parameters.nightly >>" - CIRCLE_IS_WEEKLY="<< pipeline.parameters.weekly >>" - .circleci/render_config.py - - continuation/continue: - configuration_path: .circleci/config.continue.yml + command: echo 'Done' + workflows: - setup: + build_test: jobs: - - setup + - donothing: + name: required diff --git a/.circleci/upload_ciapp.sh b/.circleci/upload_ciapp.sh index 0c114b9ebdb..11fd546f50e 100755 --- a/.circleci/upload_ciapp.sh +++ b/.circleci/upload_ciapp.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash SERVICE_NAME="dd-trace-java" -PIPELINE_STAGE=$1 +CACHE_TYPE=$1 TEST_JVM=$2 # JAVA_???_HOME are set in the base image for each used JDK https://github.com/DataDog/dd-trace-java-docker-build/blob/master/Dockerfile#L86 @@ -23,7 +23,7 @@ junit_upload() { DD_API_KEY=$1 \ datadog-ci junit upload --service $SERVICE_NAME \ --logs \ - --tags "test.traits:{\"marker\":[\"$PIPELINE_STAGE\"]}" \ + --tags "test.traits:{\"category\":[\"$CACHE_TYPE\"]}" \ --tags "runtime.name:$(java_prop java.runtime.name)" \ --tags "runtime.vendor:$(java_prop java.vendor)" \ --tags "runtime.version:$(java_prop java.version)" \ diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS 
index 5983b5d0bd2..b4882f18d1e 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -5,72 +5,108 @@ * @DataDog/apm-java # @DataDog/apm-idm-java -dd-java-agent/instrumentation/ @DataDog/apm-idm-java +/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/decorator/ @DataDog/apm-idm-java +/dd-java-agent/instrumentation/ @DataDog/apm-idm-java -# @DataDog/profiling-java -dd-java-agent/agent-profiling/ @DataDog/profiling-java -dd-java-agent/agent-crashtracking/ @DataDog/profiling-java -dd-java-agent/instrumentation/exception-profiling/ @DataDog/profiling-java -dd-java-agent/instrumentation/java-directbytebuffer/ @DataDog/profiling-java -dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/jfr/ @DataDog/profiling-java -dd-java-agent/agent-bootstrap/src/main/java11/datadog/trace/bootstrap/instrumentation/jfr/ @DataDog/profiling-java -dd-smoke-tests/profiling-integration-tests/ @DataDog/profiling-java +# @DataDog/apm-release-platform +/.gitlab/ @DataDog/apm-release-platform +/.gitlab-ci.yml @DataDog/apm-release-platform -# @DataDog/ci-app-libraries-java -dd-java-agent/agent-ci-visibility/ @DataDog/ci-app-libraries-java -dd-java-agent/instrumentation/cucumber/ @DataDog/ci-app-libraries-java -dd-java-agent/instrumentation/jacoco/ @DataDog/ci-app-libraries-java -dd-java-agent/instrumentation/junit-4.10/ @DataDog/ci-app-libraries-java -dd-java-agent/instrumentation/junit-5.3/ @DataDog/ci-app-libraries-java -dd-java-agent/instrumentation/karate/ @DataDog/ci-app-libraries-java -dd-java-agent/instrumentation/scalatest/ @DataDog/ci-app-libraries-java -dd-java-agent/instrumentation/selenium/ @DataDog/ci-app-libraries-java -dd-java-agent/instrumentation/testng/ @DataDog/ci-app-libraries-java -dd-java-agent/instrumentation/gradle-3.0/ @DataDog/ci-app-libraries-java -dd-java-agent/instrumentation/gradle-8.3/ @DataDog/ci-app-libraries-java -dd-java-agent/instrumentation/gradle-testing/ @DataDog/ci-app-libraries-java 
-dd-java-agent/instrumentation/maven-3.2.1/ @DataDog/ci-app-libraries-java -dd-java-agent/instrumentation/maven-surefire/ @DataDog/ci-app-libraries-java -dd-java-agent/instrumentation/weaver/ @DataDog/ci-app-libraries-java -dd-smoke-tests/gradle/ @DataDog/ci-app-libraries-java -dd-smoke-tests/maven/ @DataDog/ci-app-libraries-java -**/civisibility/ @DataDog/ci-app-libraries-java -**/CiVisibility*.java @DataDog/ci-app-libraries-java -**/CiVisibility*.groovy @DataDog/ci-app-libraries-java +# @DataDog/apm-sdk-api-java +/dd-trace-ot/ @DataDog/apm-sdk-api-java -# @DataDog/debugger-java (Live Debugger) -dd-java-agent/agent-debugger/ @DataDog/debugger-java -dd-smoke-tests/debugger-integration-tests/ @DataDog/debugger-java +# @DataDog/apm-serverless +/dd-trace-core/src/main/java/datadog/trace/lambda/ @DataDog/apm-serverless +/dd-trace-core/src/test/groovy/datadog/trace/lambda/ @DataDog/apm-serverless + +# @DataDog/apm-lang-platform-java +/.circleci/ @DataDog/apm-lang-platform-java +/.github/ @DataDog/apm-lang-platform-java +/benchmark/ @DataDog/apm-lang-platform-java +/components/ @DataDog/apm-lang-platform-java +/dd-java-agent/instrumentation/java-* @DataDog/apm-lang-platform-java +/metadata/ @DataDog/apm-lang-platform-java +/remote-config/ @DataDog/apm-lang-platform-java +/telemetry/ @DataDog/apm-lang-platform-java +/test-published-dependencies/ @DataDog/apm-lang-platform-java # @DataDog/asm-java (AppSec/IAST) -dd-java-agent/agent-iast/ @DataDog/asm-java -dd-java-agent/instrumentation/*iast* @DataDog/asm-java -dd-java-agent/instrumentation/*appsec* @DataDog/asm-java -dd-java-agent/instrumentation/json/ @DataDog/asm-java -dd-java-agent/instrumentation/snakeyaml/ @DataDog/asm-java -dd-java-agent/instrumentation/velocity/ @DataDog/asm-java -dd-java-agent/instrumentation/freemarker/ @DataDog/asm-java -dd-smoke-tests/iast-util/ @DataDog/asm-java -dd-smoke-tests/spring-security/ @DataDog/asm-java -dd-java-agent/instrumentation/commons-fileupload/ @DataDog/asm-java 
-dd-java-agent/instrumentation/spring-security-5/ @DataDog/asm-java -**/appsec/ @DataDog/asm-java -**/iast/ @DataDog/asm-java -**/Iast*.java @DataDog/asm-java -**/Iast*.groovy @DataDog/asm-java -**/rasp/ @Datadog/asm-java -**/*Rasp*.java @DataDog/asm-java -**/*Rasp*.groovy @DataDog/asm-java -**/*Waf*.java @DataDog/asm-java -**/*Waf*.groovy @DataDog/asm-java +/buildSrc/call-site-instrumentation-plugin/ @DataDog/asm-java +/dd-java-agent/agent-iast/ @DataDog/asm-java +/dd-java-agent/instrumentation/*iast* @DataDog/asm-java +/dd-java-agent/instrumentation/*appsec* @DataDog/asm-java +/dd-java-agent/instrumentation/json/ @DataDog/asm-java +/dd-java-agent/instrumentation/snakeyaml/ @DataDog/asm-java +/dd-java-agent/instrumentation/velocity/ @DataDog/asm-java +/dd-java-agent/instrumentation/freemarker/ @DataDog/asm-java +/dd-smoke-tests/iast-util/ @DataDog/asm-java +/dd-smoke-tests/spring-security/ @DataDog/asm-java +/dd-java-agent/instrumentation/commons-fileupload/ @DataDog/asm-java +/dd-java-agent/instrumentation/spring-security-5/ @DataDog/asm-java +/dd-trace-api/src/main/java/datadog/trace/api/EventTracker.java @DataDog/asm-java +/internal-api/src/main/java/datadog/trace/api/gateway/ @DataDog/asm-java +**/appsec/ @DataDog/asm-java +**/*CallSite*.java @DataDog/asm-java +**/*CallSite*.groovy @DataDog/asm-java +**/*CallSite*.kt @DataDog/asm-java +**/iast/ @DataDog/asm-java +**/Iast*.java @DataDog/asm-java +**/Iast*.groovy @DataDog/asm-java +**/rasp/ @Datadog/asm-java +**/*Rasp*.java @DataDog/asm-java +**/*Rasp*.groovy @DataDog/asm-java +**/*Waf*.java @DataDog/asm-java +**/*Waf*.groovy @DataDog/asm-java + +# @DataDog/ci-app-libraries-java +/dd-java-agent/agent-ci-visibility/ @DataDog/ci-app-libraries-java +/dd-java-agent/instrumentation/cucumber/ @DataDog/ci-app-libraries-java +/dd-java-agent/instrumentation/jacoco/ @DataDog/ci-app-libraries-java +/dd-java-agent/instrumentation/junit-4.10/ @DataDog/ci-app-libraries-java +/dd-java-agent/instrumentation/junit-5.3/ 
@DataDog/ci-app-libraries-java +/dd-java-agent/instrumentation/karate/ @DataDog/ci-app-libraries-java +/dd-java-agent/instrumentation/scalatest/ @DataDog/ci-app-libraries-java +/dd-java-agent/instrumentation/selenium/ @DataDog/ci-app-libraries-java +/dd-java-agent/instrumentation/testng/ @DataDog/ci-app-libraries-java +/dd-java-agent/instrumentation/gradle-3.0/ @DataDog/ci-app-libraries-java +/dd-java-agent/instrumentation/gradle-8.3/ @DataDog/ci-app-libraries-java +/dd-java-agent/instrumentation/gradle-testing/ @DataDog/ci-app-libraries-java +/dd-java-agent/instrumentation/maven-3.2.1/ @DataDog/ci-app-libraries-java +/dd-java-agent/instrumentation/maven-surefire/ @DataDog/ci-app-libraries-java +/dd-java-agent/instrumentation/weaver/ @DataDog/ci-app-libraries-java +/dd-smoke-tests/gradle/ @DataDog/ci-app-libraries-java +/dd-smoke-tests/maven/ @DataDog/ci-app-libraries-java +/internal-api/src/main/java/datadog/trace/api/git/ @DataDog/ci-app-libraries-java +**/civisibility/ @DataDog/ci-app-libraries-java +**/CiVisibility*.java @DataDog/ci-app-libraries-java +**/CiVisibility*.groovy @DataDog/ci-app-libraries-java + +# @DataDog/debugger-java (Live Debugger) +/dd-java-agent/agent-debugger/ @DataDog/debugger-java +/dd-smoke-tests/debugger-integration-tests/ @DataDog/debugger-java # @DataDog/data-jobs-monitoring -dd-java-agent/instrumentation/spark/ @DataDog/data-jobs-monitoring -dd-java-agent/instrumentation/spark-executor/ @DataDog/data-jobs-monitoring +/dd-java-agent/instrumentation/spark/ @DataDog/data-jobs-monitoring +/dd-java-agent/instrumentation/spark-executor/ @DataDog/data-jobs-monitoring # @DataDog/data-streams-monitoring -dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/datastreams @DataDog/data-streams-monitoring -dd-trace-core/src/main/java/datadog/trace/core/datastreams @DataDog/data-streams-monitoring -dd-trace-core/src/test/groovy/datadog/trace/core/datastreams @DataDog/data-streams-monitoring 
-internal-api/src/main/java/datadog/trace/api/datastreams @DataDog/data-streams-monitoring -internal-api/src/test/groovy/datadog/trace/api/datastreams @DataDog/data-streams-monitoring +/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/datastreams @DataDog/data-streams-monitoring +/dd-trace-core/src/main/java/datadog/trace/core/datastreams @DataDog/data-streams-monitoring +/dd-trace-core/src/test/groovy/datadog/trace/core/datastreams @DataDog/data-streams-monitoring +/internal-api/src/main/java/datadog/trace/api/datastreams @DataDog/data-streams-monitoring +/internal-api/src/test/groovy/datadog/trace/api/datastreams @DataDog/data-streams-monitoring +**/datastreams/ @DataDog/data-streams-monitoring +**/DataStreams* @DataDog/data-streams-monitoring + +# @DataDog/profiling-java +/dd-java-agent/agent-profiling/ @DataDog/profiling-java +/dd-java-agent/agent-crashtracking/ @DataDog/profiling-java +/dd-java-agent/instrumentation/exception-profiling/ @DataDog/profiling-java +/dd-java-agent/instrumentation/java-directbytebuffer/ @DataDog/profiling-java +/dd-java-agent/agent-bootstrap/src/main/java/datadog/trace/bootstrap/instrumentation/jfr/ @DataDog/profiling-java +/dd-java-agent/agent-bootstrap/src/main/java11/datadog/trace/bootstrap/instrumentation/jfr/ @DataDog/profiling-java +/dd-trace-api/src/main/java/datadog/trace/api/profiling @DataDog/profiling-java +/internal-api/src/main/java/datadog/trace/api/profiling @DataDog/profiling-java +/internal-api/src/main/java/datadog/trace/api/EndpointCheckpointer.java @DataDog/profiling-java +/internal-api/src/main/java/datadog/trace/api/EndpointTracker.java @DataDog/profiling-java +/dd-smoke-tests/profiling-integration-tests/ @DataDog/profiling-java diff --git a/.github/dependabot.yml b/.github/dependabot.yml index c272b36b581..f914fd12ade 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -8,7 +8,13 @@ updates: - package-ecosystem: "github-actions" directory: "/" schedule: - interval: "monthly" + 
interval: "weekly" + labels: + - "comp: tooling" + - "tag: dependencies" + - "tag: no release notes" + commit-message: + prefix: "chore(ci): " groups: gh-actions-packages: patterns: diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index c0fb4db7744..2968ad41239 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -10,6 +10,7 @@ - Assign the `type:` and (`comp:` or `inst:`) labels in addition to [any usefull labels](https://github.com/DataDog/dd-trace-java/blob/master/CONTRIBUTING.md#labels) - Don't use `close`, `fix` or any [linking keywords](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword) when referencing an issue. Use `solves` instead, and assign the PR [milestone](https://github.com/DataDog/dd-trace-java/milestones) to the issue +- Update the [CODEOWNERS](https://github.com/DataDog/dd-trace-java/blob/master/.github/CODEOWNERS) file on source file addition, move, or deletion - Update the [public documentation](https://docs.datadoghq.com/tracing/trace_collection/library_config/java/) in case of new configuration flag or behavior Jira ticket: [PROJ-IDENT] diff --git a/.github/workflows/README.md b/.github/workflows/README.md index 7273a1d8435..3d8682e5e1a 100644 --- a/.github/workflows/README.md +++ b/.github/workflows/README.md @@ -57,7 +57,7 @@ _Notes:_ This action will not apply to release candidate versions using `-RC` ta _Trigger:_ Quarterly released, loosely [a day after the new image tag is created](https://github.com/DataDog/dd-trace-java-docker-build/blob/master/.github/workflows/docker-tag.yml). -_Action:_ Update the Docker build image used in CircleCI and GitLab CI with the latest tag. +_Action:_ Update the Docker build image used in GitLab CI with the latest tag. _Recovery:_ Download artifacts and upload them manually to the related _download release_. 
@@ -106,7 +106,7 @@ _Recovery:_ Manually trigger the action again. ### analyze-changes [πŸ”—](analyze-changes.yaml) -_Trigger:_ When pushing commits to `master` or any pull request targeting `master`. +_Trigger:_ When pushing commits to `master`. _Action:_ @@ -115,6 +115,16 @@ _Action:_ _Notes:_ Results are sent on both production and staging environments. +### check-ci-pipelines [πŸ”—](check-ci-pipelines.yaml) + +_Trigger:_ When opening or updating a PR. + +_Action:_ This action will check all other continuous integration jobs (Github action, Gitlab, CircleCi), and will fail if any of them fails. +The purpose of this job is to be required for PR merges, achieving Green CI Policy. +It got an `ignored` parameters to exclude some jobs if they are temprorary failing. + +_Recovery:_ Manually trigger the action on the desired branch. + ### comment-on-submodule-update [πŸ”—](comment-on-submodule-update.yaml) _Trigger:_ When creating a PR commits to `master` or a `release/*` branch with a Git Submodule update. @@ -137,7 +147,6 @@ _Action:_ Build the Java Client Library and runs [the system tests](https://gith _Recovery:_ Manually trigger the action on the desired branch. - ## Maintenance GitHub actions should be part of the [repository allowed actions to run](https://github.com/DataDog/dd-trace-java/settings/actions). 
diff --git a/.github/workflows/add-release-to-cloudfoundry.yaml b/.github/workflows/add-release-to-cloudfoundry.yaml index ea59d162797..6f8843c0943 100644 --- a/.github/workflows/add-release-to-cloudfoundry.yaml +++ b/.github/workflows/add-release-to-cloudfoundry.yaml @@ -43,7 +43,7 @@ jobs: run: | echo "${{ steps.get-release-version.outputs.VERSION }}: ${{ steps.get-release-url.outputs.URL }}" >> index.yml - name: Commit and push changes - uses: planetscale/ghcommit-action@5b20c92facae8dbf8a3836dc65b8503dda378573 # v0.2.13 + uses: planetscale/ghcommit-action@6a383e778f6620afde4bf4b45069d3c6983c1ae2 # v0.2.15 with: commit_message: "chore: Add version ${{ steps.get-release-version.outputs.VERSION }} to Cloud Foundry" repo: ${{ github.repository }} diff --git a/.github/workflows/analyze-changes.yaml b/.github/workflows/analyze-changes.yaml index 7dc19f6332f..b024a7f9fa2 100644 --- a/.github/workflows/analyze-changes.yaml +++ b/.github/workflows/analyze-changes.yaml @@ -40,7 +40,7 @@ jobs: ${{ runner.os }}-gradle- - name: Initialize CodeQL - uses: github/codeql-action/init@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16 + uses: github/codeql-action/init@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0 with: languages: 'java' build-mode: 'manual' @@ -57,10 +57,12 @@ jobs: --build-cache --parallel --stacktrace --no-daemon --max-workers=4 - name: Perform CodeQL Analysis and upload results to GitHub Security tab - uses: github/codeql-action/analyze@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16 + uses: github/codeql-action/analyze@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0 trivy: name: Analyze changes with Trivy + # Don’t run on PR, only when pushing to master + if: github.event_name == 'push' && github.ref == 'refs/heads/master' runs-on: ubuntu-latest permissions: actions: read @@ -107,7 +109,7 @@ jobs: ls -laR "./workspace/.trivy" - name: Run Trivy security scanner - uses: aquasecurity/trivy-action@6c175e9c4083a92bbca2f9724c8a5e33bc2d97a5 # v0.30.0 + 
uses: aquasecurity/trivy-action@76071ef0d7ec797419534a183b498b4d6366cf37 # v0.31.0 with: scan-type: rootfs scan-ref: './workspace/.trivy/' @@ -120,7 +122,7 @@ jobs: TRIVY_JAVA_DB_REPOSITORY: ghcr.io/aquasecurity/trivy-java-db,public.ecr.aws/aquasecurity/trivy-java-db - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16 + uses: github/codeql-action/upload-sarif@ce28f5bb42b7a9f2c824e633a3f6ee835bab6858 # v3.29.0 if: always() with: sarif_file: 'trivy-results.sarif' diff --git a/.github/workflows/check-ci-pipelines.yml b/.github/workflows/check-ci-pipelines.yml new file mode 100644 index 00000000000..dd182f34369 --- /dev/null +++ b/.github/workflows/check-ci-pipelines.yml @@ -0,0 +1,34 @@ +name: Check Pull Request CI Status + +on: + pull_request: + types: + - opened + - synchronize + - reopened + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +permissions: + checks: read + statuses: read + +jobs: + check-ci-pipelines: + # Do not change this name, it must be equal to job id + # https://github.com/DataDog/ensure-ci-success/blob/main/docs/limitations.md#do-not-set-a-name-to-the-job-shipping-ensure-ci-success + name: check-ci-pipelines + runs-on: ubuntu-latest + steps: + - name: Run Ensure CI Success + uses: DataDog/ensure-ci-success@4a4b720e881d965254a9de2a4f14d1ec0c3d0d7c + with: + initial-delay-seconds: "500" + max-retries: "60" + ignored-name-patterns: | + dd-gitlab/.* + Check pull requests + +# gitlab pipelines are reported via dd-gitlab/default-pipeline status, which can be used as a GH status check diff --git a/.github/workflows/update-docker-build-image.yaml b/.github/workflows/update-docker-build-image.yaml index 8de7b4a90d1..57d864ccb7b 100644 --- a/.github/workflows/update-docker-build-image.yaml +++ b/.github/workflows/update-docker-build-image.yaml @@ -50,17 +50,14 @@ jobs: fi echo "tag=${TAG}" >> "$GITHUB_OUTPUT" echo 
"::notice::Using Docker build image tag: ${TAG}" - - name: Update the Docker build image in CircleCI config - run: | - sed -i 's|DOCKER_IMAGE_VERSION=.*|DOCKER_IMAGE_VERSION="${{ steps.define-tag.outputs.tag }}"|' .circleci/render_config.py - name: Update the Docker build image in GitLab CI config run: | - sed -i 's|image: ghcr.io/datadog/dd-trace-java-docker-build:.*|image: ghcr.io/datadog/dd-trace-java-docker-build:${{ steps.define-tag.outputs.tag }}-base|' .gitlab-ci.yml + sed -i '' -E 's|(BUILDER_IMAGE_VERSION_PREFIX:)[^#]*([#].*)|\1 "${{ steps.define-tag.outputs.tag }}-" \2|' .gitlab-ci.yml - name: Commit and push changes env: GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} run: | - ghcommit --repository ${{ github.repository }} --branch ${{ steps.define-branch.outputs.branch }} --add .circleci/render_config.py --add .gitlab-ci.yml --message "feat(ci): Update Docker build image" + ghcommit --repository ${{ github.repository }} --branch ${{ steps.define-branch.outputs.branch }} --add .gitlab-ci.yml --message "feat(ci): Update Docker build image" - name: Create pull request env: GH_TOKEN: ${{ github.token }} diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 6a0ec785ec6..0f890df7e0f 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,6 +1,5 @@ include: - - remote: https://gitlab-templates.ddbuild.io/libdatadog/include/ci_authenticated_job.yml - - remote: https://gitlab-templates.ddbuild.io/libdatadog/include/one-pipeline.yml + - local: ".gitlab/one-pipeline.locked.yml" - local: ".gitlab/benchmarks.yml" - local: ".gitlab/macrobenchmarks.yml" - local: ".gitlab/exploration-tests.yml" @@ -18,79 +17,153 @@ stages: - generate-signing-key variables: + # Gitlab runner features; see https://docs.gitlab.com/runner/configuration/feature-flags.html + # Fold and time all script sections + FF_SCRIPT_SECTIONS: 1 + REGISTRY: 486234852809.dkr.ecr.us-east-1.amazonaws.com BUILD_JOB_NAME: "build" DEPENDENCY_CACHE_POLICY: pull BUILD_CACHE_POLICY: pull - GRADLE_VERSION: "8.4" # must match 
gradle-wrapper.properties + GRADLE_VERSION: "8.5" # must match gradle-wrapper.properties MAVEN_REPOSITORY_PROXY: "http://artifactual.artifactual.all-clusters.local-dc.fabric.dog:8081/repository/maven-central/" GRADLE_PLUGIN_PROXY: "http://artifactual.artifactual.all-clusters.local-dc.fabric.dog:8081/repository/gradle-plugin-portal-proxy/" - JAVA_BUILD_IMAGE_VERSION: "v25.01" + BUILDER_IMAGE_VERSION_PREFIX: "" # use either an empty string (e.g. "") for latest images or a version followed by a hyphen (e.g. "v25.05-") REPO_NOTIFICATION_CHANNEL: "#apm-java-escalations" + DEFAULT_TEST_JVMS: /^(8|11|17|21)$/ PROFILE_TESTS: description: "Enable profiling of tests" value: "false" + NON_DEFAULT_JVMS: + description: "Enable tests on JVMs that are not the default" + value: "false" + RUN_FLAKY_TESTS: + description: "Enable flaky tests" + value: "false" + +.test_matrix: &test_matrix + - testJvm: &test_jvms + - "8" + - "11" + - "17" + - "21" + - "semeru11" + - "oracle8" + - "zulu8" + - "semeru8" + - "ibm8" + - "zulu11" + - "semeru17" + +# Gitlab doesn't support "parallel" and "parallel:matrix" at the same time +# These blocks emulate "parallel" by including it in the matrix +.test_matrix_2: &test_matrix_2 + - testJvm: *test_jvms + CI_SPLIT: ["1/2", "2/2"] + +.test_matrix_4: &test_matrix_4 + - testJvm: *test_jvms + CI_SPLIT: ["1/4", "2/4", "3/4", "4/4"] + +.test_matrix_6: &test_matrix_6 + - testJvm: *test_jvms + CI_SPLIT: ["1/6", "2/6", "3/6", "4/6", "5/6", "6/6"] + +.test_matrix_8: &test_matrix_8 + - testJvm: *test_jvms + CI_SPLIT: ["1/8", "2/8", "3/8", "4/8", "5/8", "6/8", "7/8", "8/8"] + +.test_matrix_12: &test_matrix_12 + - testJvm: *test_jvms + CI_SPLIT: [ "1/12", "2/12", "3/12", "4/12", "5/12", "6/12", "7/12", "8/12", "9/12", "10/12", "11/12", "12/12" ] + +.master_only: &master_only + - if: $CI_COMMIT_BRANCH == "master" + when: on_success default: tags: [ "arch:amd64" ] -.fan_in: - stage: tests - image: registry.ddbuild.io/images/base/gbi-ubuntu_2204-slim:release - script: 
- - echo "done" - .set_datadog_api_keys: &set_datadog_api_keys - export DATADOG_API_KEY_PROD=$(aws ssm get-parameter --region us-east-1 --name ci.dd-trace-java.DATADOG_API_KEY_PROD --with-decryption --query "Parameter.Value" --out text) # CI_NODE_INDEX and CI_NODE_TOTAL are 1-indexed and not always set. These steps normalize the numbers for jobs .normalize_node_index: &normalize_node_index - - echo "CI_NODE_TOTAL=$CI_NODE_TOTAL , CI_NODE_INDEX=$CI_NODE_INDEX" + - if [ "$CI_NO_SPLIT" == "true" ] ; then CI_NODE_INDEX=1; CI_NODE_TOTAL=1; fi # A job uses parallel but doesn't intend to split by index + - if [ -n "$CI_SPLIT" ]; then CI_NODE_INDEX="${CI_SPLIT%%/*}"; CI_NODE_TOTAL="${CI_SPLIT##*/}"; fi + - echo "CI_NODE_TOTAL=${CI_NODE_TOTAL}, CI_NODE_INDEX=$CI_NODE_INDEX" - export NORMALIZED_NODE_TOTAL=${CI_NODE_TOTAL:-1} - ONE_INDEXED_NODE_INDEX=${CI_NODE_INDEX:-1}; export NORMALIZED_NODE_INDEX=$((ONE_INDEXED_NODE_INDEX - 1)) - - echo "NORMALIZED_NODE_TOTAL=$NORMALIZED_NODE_TOTAL , NORMALIZED_NODE_INDEX=$NORMALIZED_NODE_INDEX" + - echo "NORMALIZED_NODE_TOTAL=${NORMALIZED_NODE_TOTAL}, NORMALIZED_NODE_INDEX=$NORMALIZED_NODE_INDEX" + +.cgroup_info: &cgroup_info + - source .gitlab/gitlab-utils.sh + - gitlab_section_start "cgroup-info" "cgroup info" + - .gitlab/cgroup-info.sh + - gitlab_section_end "cgroup-info" .gradle_build: &gradle_build - image: ghcr.io/datadog/dd-trace-java-docker-build:${JAVA_BUILD_IMAGE_VERSION}-base + image: ghcr.io/datadog/dd-trace-java-docker-build:${BUILDER_IMAGE_VERSION_PREFIX}base stage: build variables: - GRADLE_OPTS: "-Dorg.gradle.jvmargs='-Xmx2560M -Xms2560M -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" MAVEN_OPTS: "-Xms64M -Xmx512M" GRADLE_WORKERS: 2 + GRADLE_MEM: 2560M KUBERNETES_CPU_REQUEST: 8 - KUBERNETES_MEMORY_REQUEST: 6Gi + KUBERNETES_MEMORY_REQUEST: 8Gi + KUBERNETES_MEMORY_LIMIT: 8Gi + CACHE_TYPE: lib #default + RUNTIME_AVAILABLE_PROCESSORS_OVERRIDE: 4 # 
Runtime.getRuntime().availableProcessors() returns incorrect or very high values in Kubernetes cache: - - key: '$CI_SERVER_VERSION-v2' # Dependencies cache. Reset the cache every time gitlab is upgraded. ~Every couple months + - key: '$CI_SERVER_VERSION-$CACHE_TYPE' # Dependencies cache. Reset the cache every time gitlab is upgraded. ~Every couple months paths: # Cached dependencies and wrappers for gradle - .gradle/wrapper - .gradle/caches - .gradle/notifications policy: $DEPENDENCY_CACHE_POLICY - - key: $CI_PIPELINE_ID-$BUILD_CACHE_TYPE # Incremental build cache. Shared by all jobs in the pipeline of the same type + fallback_keys: # Use fallback keys because all cache types are not populated. See note under: populate_dep_cache + - '$CI_SERVER_VERSION-base' + - '$CI_SERVER_VERSION-lib' + - key: $CI_PIPELINE_ID-$CACHE_TYPE # Incremental build cache. Shared by all jobs in the pipeline of the same type paths: - .gradle/caches/$GRADLE_VERSION - .gradle/$GRADLE_VERSION/executionHistory - workspace policy: $BUILD_CACHE_POLICY before_script: - - export GRADLE_USER_HOME=`pwd`/.gradle - - export GRADLE_ARGS=" --build-cache --stacktrace --no-daemon --parallel --max-workers=$GRADLE_WORKERS -PmavenRepositoryProxy=$MAVEN_REPOSITORY_PROXY -PgradlePluginProxy=$GRADLE_PLUGIN_PROXY" + - source .gitlab/gitlab-utils.sh + - mkdir -p .gradle + - export GRADLE_USER_HOME=$(pwd)/.gradle + - | + # Don't put jvm args here as it will be picked up by child gradle processes used in tests + cat << EOF > $GRADLE_USER_HOME/gradle.properties + mavenRepositoryProxy=$MAVEN_REPOSITORY_PROXY + gradlePluginProxy=$GRADLE_PLUGIN_PROXY + EOF + - | + # replace maven central part by MAVEN_REPOSITORY_PROXY in .mvn/wrapper/maven-wrapper.properties + sed -i "s|https://repo.maven.apache.org/maven2/|$MAVEN_REPOSITORY_PROXY|g" .mvn/wrapper/maven-wrapper.properties + - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx$GRADLE_MEM -Xms$GRADLE_MEM -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError 
-XX:HeapDumpPath=/tmp'" + - export GRADLE_ARGS=" --build-cache --stacktrace --no-daemon --parallel --max-workers=$GRADLE_WORKERS" - *normalize_node_index # for weird reasons, gradle will always "chmod 700" the .gradle folder # with Gitlab caching, .gradle is always owned by root and thus gradle's chmod invocation fails # This dance is a hack to have .gradle owned by the Gitlab runner user - - mkdir -p .gradle + - gitlab_section_start "gradle-dance" "Fix .gradle directory permissions" - cp -r .gradle .gradle-copy - rm -rf .gradle - mv .gradle-copy .gradle - ls -la + - gitlab_section_end "gradle-dance" + after_script: + - *cgroup_info build: extends: .gradle_build variables: BUILD_CACHE_POLICY: push - BUILD_CACHE_TYPE: lib + CACHE_TYPE: lib DEPENDENCY_CACHE_POLICY: pull script: - if [ $CI_PIPELINE_SOURCE == "schedule" ] ; then ./gradlew resolveAndLockAll --write-locks; fi @@ -109,16 +182,90 @@ build: reports: dotenv: build.env -build_and_populate_dep_cache: - extends: build +build_tests: + extends: .gradle_build variables: BUILD_CACHE_POLICY: push + DEPENDENCY_CACHE_POLICY: pull + GRADLE_MEM: 4G + GRADLE_WORKERS: 3 + KUBERNETES_MEMORY_REQUEST: 18Gi + KUBERNETES_MEMORY_LIMIT: 18Gi + parallel: + matrix: + - GRADLE_TARGET: ":baseTest" + CACHE_TYPE: "base" + - GRADLE_TARGET: ":profilingTest" + CACHE_TYPE: "profiling" + - GRADLE_TARGET: ":instrumentationTest" + CACHE_TYPE: "inst" + - GRADLE_TARGET: ":instrumentationLatestDepTest" + CACHE_TYPE: "latestdep" + - GRADLE_TARGET: ":smokeTest" + CACHE_TYPE: "smoke" + MAVEN_OPTS: "-Xms64M -Xmx512M -Dorg.slf4j.simpleLogger.defaultLogLevel=debug" # FIXME: Build :smokeTest build fails unless mvn debug logging is on + + script: + - ./gradlew clean $GRADLE_TARGET -PskipTests $GRADLE_ARGS + +populate_dep_cache: + extends: build_tests + variables: + BUILD_CACHE_POLICY: pull DEPENDENCY_CACHE_POLICY: push rules: - if: '$POPULATE_CACHE' when: on_success - when: manual allow_failure: true + parallel: + matrix: + - GRADLE_TARGET: 
":dd-java-agent:shadowJar :dd-trace-api:jar :dd-trace-ot:shadowJar" + CACHE_TYPE: "lib" + - GRADLE_TARGET: ":baseTest" + CACHE_TYPE: "base" + - GRADLE_TARGET: ":profilingTest" + CACHE_TYPE: "profiling" +# FIXME: Gitlab doesn't support s3 based caches >5GB. Fixed in Gitlab 17.5 +# See: https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26921#note_2132307223 +# - GRADLE_TARGET: ":instrumentationTest" +# CACHE_TYPE: "inst" +# - GRADLE_TARGET: ":instrumentationLatestDepTest" +# CACHE_TYPE: "latestdep" +# - GRADLE_TARGET: ":smokeTest" +# CACHE_TYPE: "smoke" + +publish-artifacts-to-s3: + image: registry.ddbuild.io/images/mirror/amazon/aws-cli:2.4.29 + stage: publish + needs: [ build ] + script: + - source upstream.env + - export VERSION="${UPSTREAM_TRACER_VERSION%~*}" # remove ~githash from the end of version + - aws s3 cp workspace/dd-java-agent/build/libs/dd-java-agent-${VERSION}.jar s3://dd-trace-java-builds/${CI_COMMIT_REF_NAME}/dd-java-agent.jar + - aws s3 cp workspace/dd-trace-api/build/libs/dd-trace-api-${VERSION}.jar s3://dd-trace-java-builds/${CI_COMMIT_REF_NAME}/dd-trace-api.jar + - aws s3 cp workspace/dd-trace-ot/build/libs/dd-trace-ot-${VERSION}.jar s3://dd-trace-java-builds/${CI_COMMIT_REF_NAME}/dd-trace-ot.jar + - aws s3 cp workspace/dd-java-agent/build/libs/dd-java-agent-${VERSION}.jar s3://dd-trace-java-builds/${CI_PIPELINE_ID}/dd-java-agent.jar + - aws s3 cp workspace/dd-trace-api/build/libs/dd-trace-api-${VERSION}.jar s3://dd-trace-java-builds/${CI_PIPELINE_ID}/dd-trace-api.jar + - aws s3 cp workspace/dd-trace-ot/build/libs/dd-trace-ot-${VERSION}.jar s3://dd-trace-java-builds/${CI_PIPELINE_ID}/dd-trace-ot.jar + - | + cat << EOF > links.json + { + "S3 Links": [ + { + "external_link": { + "label": "Public Link to dd-java-agent.jar", + "url": "https://s3.us-east-1.amazonaws.com/dd-trace-java-builds/${CI_PIPELINE_ID}/dd-java-agent.jar" + } + } + ] + } + EOF + artifacts: + reports: + annotations: + - links.json + spotless: extends: .gradle_build @@ -130,11 
+277,11 @@ spotless: test_published_artifacts: extends: .gradle_build - image: ghcr.io/datadog/dd-trace-java-docker-build:${JAVA_BUILD_IMAGE_VERSION}-7 # Needs Java7 for some tests + image: ghcr.io/datadog/dd-trace-java-docker-build:${BUILDER_IMAGE_VERSION_PREFIX}7 # Needs Java7 for some tests stage: tests needs: [ build ] variables: - BUILD_CACHE_TYPE: lib + CACHE_TYPE: lib script: - mvn_local_repo=$(./mvnw help:evaluate -Dexpression=settings.localRepository -q -DforceStdout) - rm -rf "${mvn_local_repo}/com/datadoghq" @@ -143,10 +290,14 @@ test_published_artifacts: - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx2G -Xms2G -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" - ./gradlew publishToMavenLocal $GRADLE_ARGS - cd test-published-dependencies - - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx512M -Xms512M -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" + - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xmx1G -Xms1G -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp'" - ./gradlew check --info $GRADLE_ARGS after_script: + - *cgroup_info + - source .gitlab/gitlab-utils.sh + - gitlab_section_start "collect-reports" "Collecting reports" - .circleci/collect_reports.sh + - gitlab_section_end "collect-reports" artifacts: when: always paths: @@ -157,16 +308,29 @@ test_published_artifacts: needs: [ build ] stage: tests variables: - BUILD_CACHE_TYPE: lib + CACHE_TYPE: lib script: - ./gradlew $GRADLE_TARGET -PskipTests -PrunBuildSrcTests -PskipSpotless -PtaskPartitionCount=$NORMALIZED_NODE_TOTAL -PtaskPartition=$NORMALIZED_NODE_INDEX $GRADLE_ARGS after_script: + - *cgroup_info + - source .gitlab/gitlab-utils.sh + - gitlab_section_start "collect-reports" "Collecting reports" - .circleci/collect_reports.sh --destination ./check_reports --move + - gitlab_section_end "collect-reports" artifacts: when: always paths: - ./check_reports - 
'.gradle/daemon/*/*.out.log' + retry: + max: 2 + when: + - unknown_failure + - stuck_or_timeout_failure + - runner_system_failure + - unmet_prerequisites + - scheduler_failure + - data_integrity_failure check_base: extends: .check_job @@ -197,19 +361,23 @@ check_debugger: muzzle: extends: .gradle_build - needs: [ build ] + needs: [ build_tests ] stage: tests parallel: 8 variables: - BUILD_CACHE_TYPE: lib + CACHE_TYPE: inst script: - export SKIP_BUILDSCAN="true" - ./gradlew writeMuzzleTasksToFile $GRADLE_ARGS - sort workspace/build/muzzleTasks > sortedMuzzleTasks - split --number=l/$NORMALIZED_NODE_TOTAL --suffix-length=1 --numeric-suffixes sortedMuzzleTasks muzzleSplit - - ./gradlew `cat muzzleSplit${NORMALIZED_NODE_INDEX} | xargs` $GRADLE_ARGS + - ./gradlew $(cat muzzleSplit${NORMALIZED_NODE_INDEX} | xargs) $GRADLE_ARGS after_script: + - *cgroup_info + - source .gitlab/gitlab-utils.sh + - gitlab_section_start "collect-reports" "Collecting reports" - .circleci/collect_reports.sh + - gitlab_section_end "collect-reports" artifacts: when: always paths: @@ -218,14 +386,15 @@ muzzle: muzzle-dep-report: extends: .gradle_build - needs: [ build ] + needs: [ build_tests ] stage: tests variables: - BUILD_CACHE_TYPE: test + CACHE_TYPE: inst script: - export SKIP_BUILDSCAN="true" - ./gradlew generateMuzzleReport muzzleInstrumentationReport $GRADLE_ARGS after_script: + - *cgroup_info - .circleci/collect_muzzle_deps.sh artifacts: when: always @@ -250,30 +419,51 @@ muzzle-dep-report: .test_job: extends: .gradle_build - image: ghcr.io/datadog/dd-trace-java-docker-build:$testJvm - needs: [ build ] + image: ghcr.io/datadog/dd-trace-java-docker-build:${BUILDER_IMAGE_VERSION_PREFIX}$testJvm + tags: [ "docker-in-docker:amd64" ] # use docker-in-docker runner for testcontainers + needs: [ build_tests ] stage: tests variables: - BUILD_CACHE_TYPE: lib - GRADLE_PARAMS: "" + KUBERNETES_MEMORY_REQUEST: 17Gi + KUBERNETES_MEMORY_LIMIT: 17Gi + KUBERNETES_CPU_REQUEST: 10 + GRADLE_WORKERS: 4 + 
GRADLE_MEM: 3G + GRADLE_PARAMS: "-PskipFlakyTests" CONTINUE_ON_FAILURE: "false" + TESTCONTAINERS_CHECKS_DISABLE: "true" + TESTCONTAINERS_RYUK_DISABLED: "true" + TESTCONTAINERS_HUB_IMAGE_NAME_PREFIX: "registry.ddbuild.io/images/mirror/" + JETTY_AVAILABLE_PROCESSORS: 4 # Jetty incorrectly calculates processor count in containers + rules: + - if: $testJvm =~ $DEFAULT_TEST_JVMS + when: on_success + - if: $NON_DEFAULT_JVMS == "true" + when: on_success + - if: $CI_COMMIT_BRANCH == "master" + when: on_success script: - - > + - > if [ "$PROFILE_TESTS" == "true" ] && [ "$testJvm" != "ibm8" ] && [ "$testJvm" != "oracle8" ]; then export PROFILER_COMMAND="-XX:StartFlightRecording=settings=profile,filename=/tmp/${CI_JOB_NAME_SLUG}.jfr,dumponexit=true"; fi - *prepare_test_env - - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xms2G -Xmx2G $PROFILER_COMMAND -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp' -Ddatadog.forkedMaxHeapSize=768M -Ddatadog.forkedMinHeapSize=128M" - - ./gradlew $GRADLE_TARGET $GRADLE_PARAMS -PtestJvm=$testJvm $GRADLE_ARGS --continue || $CONTINUE_ON_FAILURE + - export GRADLE_OPTS="-Dorg.gradle.jvmargs='-Xms$GRADLE_MEM -Xmx$GRADLE_MEM $PROFILER_COMMAND -XX:ErrorFile=/tmp/hs_err_pid%p.log -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=/tmp' -Ddatadog.forkedMaxHeapSize=1024M -Ddatadog.forkedMinHeapSize=128M" + - ./gradlew $GRADLE_TARGET $GRADLE_PARAMS -PtestJvm=$testJvm -PtaskPartitionCount=$NORMALIZED_NODE_TOTAL -PtaskPartition=$NORMALIZED_NODE_INDEX $GRADLE_ARGS --continue || $CONTINUE_ON_FAILURE after_script: - *restore_pretest_env - *set_datadog_api_keys + - *cgroup_info + - source .gitlab/gitlab-utils.sh + - gitlab_section_start "collect-reports" "Collecting reports" - .circleci/collect_reports.sh - if [ "$PROFILE_TESTS" == "true" ]; then .circleci/collect_profiles.sh; fi - .circleci/collect_results.sh - - .circleci/upload_ciapp.sh tests $testJvm -# TODO Get APM Test Agent Trace Check Results + - 
.circleci/upload_ciapp.sh $CACHE_TYPE $testJvm + - gitlab_section_end "collect-reports" + - URL_ENCODED_JOB_NAME=$(jq -rn --arg x "$CI_JOB_NAME" '$x|@uri') + - echo -e "${TEXT_BOLD}${TEXT_YELLOW}See test results in Datadog:${TEXT_CLEAR} https://app.datadoghq.com/ci/test/runs?query=test_level%3Atest%20%40test.service%3Add-trace-java%20%40ci.pipeline.id%3A${CI_PIPELINE_ID}%20%40ci.job.name%3A%22${URL_ENCODED_JOB_NAME}%22" artifacts: when: always paths: @@ -281,13 +471,45 @@ muzzle-dep-report: - ./profiles.tar - ./results - '.gradle/daemon/*/*.out.log' + reports: + junit: results/*.xml + retry: + max: 2 + when: + - unknown_failure + - stuck_or_timeout_failure + - runner_system_failure + - unmet_prerequisites + - scheduler_failure + - data_integrity_failure + +.test_job_with_test_agent: + extends: .test_job + variables: + CI_USE_TEST_AGENT: "true" + CI_AGENT_HOST: local-agent + services: + - name: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:v1.11.0 + alias: local-agent + variables: + LOG_LEVEL: "DEBUG" + TRACE_LANGUAGE: "java" + DD_SUPPRESS_TRACE_PARSE_ERRORS: "true" + DD_POOL_TRACE_CHECK_FAILURES: "true" + DD_DISABLE_ERROR_RESPONSES: "true" + ENABLED_CHECKS: "trace_content_length,trace_stall,meta_tracer_version_header,trace_count_header,trace_peer_service,trace_dd_service" + script: + - !reference [.test_job, script] + - .gitlab/check_test_agent_results.sh agent_integration_tests: extends: .test_job + tags: [ "arch:amd64" ] variables: testJvm: "8" CI_AGENT_HOST: local-agent GRADLE_TARGET: "traceAgentTest" + CACHE_TYPE: "base" services: - name: datadog/agent:7.34.0 alias: local-agent @@ -296,27 +518,128 @@ agent_integration_tests: DD_BIND_HOST: "0.0.0.0" DD_API_KEY: "invalid_key_but_this_is_fine" -required: - extends: .fan_in - needs: - - job: spotless - optional: true - - job: muzzle - optional: true - - job: test_published_artifacts - optional: true - - job: agent_integration_tests - optional: true - - job: check_base - optional: true - - job: check_inst - 
optional: true - - job: check_smoke - optional: true - - job: check_profiling - optional: true - - job: check_debugger - optional: true +test_base: + extends: .test_job + variables: + GRADLE_TARGET: ":baseTest" + CACHE_TYPE: "base" + parallel: + matrix: *test_matrix_4 + script: + - if [ "$testJvm" == "8" ]; then export GRADLE_PARAMS="-PskipFlakyTests -PcheckCoverage"; fi + - !reference [.test_job, script] + +test_inst: + extends: .test_job_with_test_agent + variables: + GRADLE_TARGET: ":instrumentationTest" + CACHE_TYPE: "inst" + parallel: + matrix: *test_matrix_6 + +test_inst_latest: + extends: .test_job_with_test_agent + variables: + GRADLE_TARGET: ":instrumentationLatestDepTest" + CACHE_TYPE: "latestDep" + parallel: + matrix: + - testJvm: ["8", "17", "21" ] + # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time + # This emulates "parallel" by including it in the matrix + CI_SPLIT: [ "1/6", "2/6", "3/6", "4/6", "5/6", "6/6"] + +test_flaky: + extends: .test_job_with_test_agent + variables: + GRADLE_PARAMS: "-PrunFlakyTests" + CACHE_TYPE: "base" + testJvm: "8" + CONTINUE_ON_FAILURE: "true" + rules: + - *master_only + - if: $RUN_FLAKY_TESTS == "true" + when: on_success + parallel: + matrix: + - GRADLE_TARGET: [":baseTest", ":smokeTest", ":debuggerTest"] + # Gitlab doesn't support "parallel" and "parallel:matrix" at the same time + # This emulates "parallel" by including it in the matrix + CI_SPLIT: [ "1/4", "2/4", "3/4", "4/4" ] + +test_flaky_inst: + extends: .test_job + variables: + GRADLE_TARGET: ":instrumentationTest" + GRADLE_PARAMS: "-PrunFlakyTests" + CACHE_TYPE: "inst" + testJvm: "8" + CONTINUE_ON_FAILURE: "true" + rules: + - *master_only + - if: $RUN_FLAKY_TESTS == "true" + when: on_success + parallel: 6 + +test_profiling: + extends: .test_job + variables: + GRADLE_TARGET: ":profilingTest" + CACHE_TYPE: "profiling" + parallel: + matrix: *test_matrix + +# specific jvms list for debugger project because J9-based JVMs have issues with 
local vars +# so need to test at least against one J9-based JVM +test_debugger: + extends: .test_job + variables: + GRADLE_TARGET: ":debuggerTest" + CACHE_TYPE: "base" + DEFAULT_TEST_JVMS: /^(8|11|17|21|semeru8)$/ + parallel: + matrix: *test_matrix + +test_smoke: + extends: .test_job + variables: + GRADLE_TARGET: "stageMainDist :smokeTest" + GRADLE_PARAMS: "-PskipFlakyTests" + CACHE_TYPE: "smoke" + parallel: + matrix: *test_matrix_4 + +test_ssi_smoke: + extends: .test_job + rules: *master_only + variables: + GRADLE_TARGET: "stageMainDist :smokeTest" + CACHE_TYPE: "smoke" + DD_INJECT_FORCE: "true" + DD_INJECTION_ENABLED: "tracer" + parallel: + matrix: *test_matrix_4 + +test_smoke_graalvm: + extends: .test_job + tags: [ "arch:amd64" ] + variables: + GRADLE_TARGET: "stageMainDist :dd-smoke-test:spring-boot-3.0-native:test" + CACHE_TYPE: "smoke" + CI_NO_SPLIT: "true" + NON_DEFAULT_JVMS: "true" + parallel: + matrix: + - testJvm: ["graalvm17", "graalvm21"] + +test_smoke_semeru8_debugger: + extends: .test_job + tags: [ "arch:amd64" ] + variables: + GRADLE_TARGET: "stageMainDist dd-smoke-tests:debugger-integration-tests:test" + CACHE_TYPE: "smoke" + NON_DEFAULT_JVMS: "true" + testJvm: "semeru8" deploy_to_profiling_backend: stage: publish @@ -394,7 +717,7 @@ deploy_to_sonatype: stage: publish needs: [ build ] variables: - BUILD_CACHE_TYPE: lib + CACHE_TYPE: lib rules: - if: '$POPULATE_CACHE' when: never @@ -473,37 +796,3 @@ create_key: expire_in: 13 mos paths: - pubkeys - -tracer-base-image-release: - extends: .ci_authenticated_job - stage: publish - needs: [ build ] - rules: - - if: '$POPULATE_CACHE' - when: never - - if: '$CI_COMMIT_TAG =~ /^v1\..*/' - when: on_success - dependencies: - - build - script: - - echo $GH_TOKEN|docker login ghcr.io/datadog -u uploader --password-stdin - - mkdir -p ./tooling/ci/binaries/ && cp workspace/dd-java-agent/build/libs/*.jar ./tooling/ci/binaries/dd-java-agent.jar - - docker buildx build -t 
ghcr.io/datadog/dd-trace-java/dd-trace-java:latest -f ./tooling/ci/Dockerfile . - - docker push ghcr.io/datadog/dd-trace-java/dd-trace-java:latest - -tracer-base-image-snapshot: - extends: .ci_authenticated_job - stage: publish - needs: [ build ] - rules: - - if: '$POPULATE_CACHE' - when: never - - if: '$CI_COMMIT_BRANCH == "master"' - when: on_success - dependencies: - - build - script: - - echo $GH_TOKEN|docker login ghcr.io/datadog -u uploader --password-stdin - - mkdir -p ./tooling/ci/binaries/ && cp workspace/dd-java-agent/build/libs/*.jar ./tooling/ci/binaries/dd-java-agent.jar - - docker buildx build -t ghcr.io/datadog/dd-trace-java/dd-trace-java:latest_snapshot -f ./tooling/ci/Dockerfile . - - docker push ghcr.io/datadog/dd-trace-java/dd-trace-java:latest_snapshot diff --git a/.gitlab/benchmarks.yml b/.gitlab/benchmarks.yml index f6684356316..8b621365070 100644 --- a/.gitlab/benchmarks.yml +++ b/.gitlab/benchmarks.yml @@ -4,7 +4,7 @@ timeout: 1h tags: ["runner:apm-k8s-tweaked-metal"] image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/benchmarking-platform:dd-trace-java-benchmarks - needs: [ "build" ] + needs: [ "build", "publish-artifacts-to-s3" ] rules: - if: '$POPULATE_CACHE' when: never @@ -14,9 +14,8 @@ - when: on_success script: - export ARTIFACTS_DIR="$(pwd)/reports" && mkdir -p "${ARTIFACTS_DIR}" - - export CIRCLE_CI_TOKEN=$(aws ssm get-parameter --region us-east-1 --name ci.dd-trace-java.circleci_token --with-decryption --query "Parameter.Value" --out text) - git config --global url."https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.ddbuild.io/DataDog/".insteadOf "https://github.com/DataDog/" - - git clone --branch dd-trace-java/tracer-benchmarks https://github.com/DataDog/benchmarking-platform.git /platform && cd /platform + - git clone --branch dd-trace-java/tracer-benchmarks-parallel https://github.com/DataDog/benchmarking-platform.git /platform && cd /platform artifacts: name: "reports" paths: @@ -86,7 +85,7 @@ benchmarks-post-results: 
interruptible: true timeout: 1h image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/benchmarking-platform:java-dsm-kafka - needs: [ "build" ] + needs: [ "build", "publish-artifacts-to-s3"] script: - git clone --branch java/kafka-dsm-overhead https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.ddbuild.io/DataDog/benchmarking-platform.git platform && cd platform - ./steps/run-benchmarks.sh @@ -97,6 +96,10 @@ benchmarks-post-results: - platform/artifacts/ expire_in: 3 months variables: + UPSTREAM_PROJECT_ID: $CI_PROJECT_ID # The ID of the current project. This ID is unique across all projects on the GitLab instance. + UPSTREAM_PROJECT_NAME: $CI_PROJECT_NAME # "dd-trace-java" + UPSTREAM_BRANCH: $CI_COMMIT_REF_NAME # The branch or tag name for which project is built. + UPSTREAM_COMMIT_SHA: $CI_COMMIT_SHA # The commit revision the project is built for. FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY: "true" dsm-kafka-producer-benchmark: @@ -126,7 +129,7 @@ debugger-benchmarks: interruptible: true timeout: 1h image: 486234852809.dkr.ecr.us-east-1.amazonaws.com/ci/benchmarking-platform:java-debugger - needs: ["build"] + needs: ["build", "publish-artifacts-to-s3"] script: - export ARTIFACTS_DIR="$(pwd)/reports" && mkdir -p "${ARTIFACTS_DIR}" - git clone --branch java/debugger-benchmarks https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.ddbuild.io/DataDog/benchmarking-platform.git /platform && cd /platform diff --git a/.gitlab/cgroup-info.sh b/.gitlab/cgroup-info.sh new file mode 100755 index 00000000000..2ddedac326b --- /dev/null +++ b/.gitlab/cgroup-info.sh @@ -0,0 +1,83 @@ +#!/usr/bin/env bash + +print_metric() { + local label="$1" + local raw_value="$2" + local trimmed_value + + # Use read -rd '' to trim leading/trailing IFS whitespace (space, tab, newline) + read -rd '' trimmed_value <<< "$raw_value" || : + + # Check if trimmed_value contains a newline character for formatting + if [[ "$trimmed_value" == *$'\n'* ]]; then + local indent=" " + # Using a more robust way to handle 
potential leading/trailing newlines in raw_value for printf + printf "%-35s :\n" "$label" + printf "%s\n" "$indent${trimmed_value//$'\n'/$'\n'$indent}" # Indent and print the value on new lines + else + printf "%-35s : %s\n" "$label" "$trimmed_value" + fi +} + +cat_file() { + cat "$1" 2>/dev/null || echo 'not found' +} + +# Show cgroup memory usage +print_metric "RAM memory" "$( (grep MemTotal /proc/meminfo | tr -s ' ' | cut -d ' ' -f 2) 2>/dev/null || echo 'not found')" + +if [ -f /sys/fs/cgroup/cgroup.controllers ]; then + # cgroup v2 + print_metric "cgroup v2 memory.peak" "$(cat_file /sys/fs/cgroup/memory.peak)" + print_metric "cgroup v2 memory.max" "$(cat_file /sys/fs/cgroup/memory.max)" + print_metric "cgroup v2 memory.high" "$(cat_file /sys/fs/cgroup/memory.high)" + print_metric "cgroup v2 memory.current" "$(cat_file /sys/fs/cgroup/memory.current)" + if [ -f /sys/fs/cgroup/memory.pressure ]; then + print_metric "cgroup v2 memory.pressure" "$(cat_file /sys/fs/cgroup/memory.pressure)" + fi + if [ -f /sys/fs/cgroup/memory.events ]; then + print_metric "cgroup v2 memory.events oom" "$( (grep -E "^oom[[:space:]]+" /sys/fs/cgroup/memory.events | cut -d' ' -f2) 2>/dev/null || echo 'not found')" + print_metric "cgroup v2 memory.events oom_kill" "$( (grep -E "^oom_kill[[:space:]]+" /sys/fs/cgroup/memory.events | cut -d' ' -f2) 2>/dev/null || echo 'not found')" + print_metric "cgroup v2 memory.events high" "$( (grep -E "^high[[:space:]]+" /sys/fs/cgroup/memory.events | cut -d' ' -f2) 2>/dev/null || echo 'not found')" + fi + + # CPU metrics + print_metric "cgroup v2 cpu.max" "$(cat_file /sys/fs/cgroup/cpu.max)" + print_metric "cgroup v2 cpu.nr_throttled" "$( (grep -E "^nr_throttled[[:space:]]+" /sys/fs/cgroup/cpu.stat | cut -d' ' -f2) 2>/dev/null || echo 'not found')" + print_metric "cgroup v2 cpu.throttled_usec" "$( (grep -E "^throttled_usec[[:space:]]+" /sys/fs/cgroup/cpu.stat | cut -d' ' -f2) 2>/dev/null || echo 'not found')" + print_metric "cgroup v2 cpu.usage_usec" "$( (grep -E
"^usage_usec[[:space:]]+" /sys/fs/cgroup/cpu.stat | cut -d' ' -f2) 2>/dev/null || echo 'not found')" + if [ -f /sys/fs/cgroup/cpu.pressure ]; then # cpu.pressure might not exist on older kernels/setups + print_metric "cgroup v2 cpu.pressure" "$(cat_file /sys/fs/cgroup/cpu.pressure)" + fi + +elif [ -d "/sys/fs/cgroup/memory" ]; then # Assuming if memory cgroup v1 exists, cpu might too + # cgroup v1 + # Note: In cgroup v1, memory stats are typically found under /sys/fs/cgroup/memory/ + # The specific path might vary if inside a nested cgroup. + # This script assumes it's running in a context where /sys/fs/cgroup/memory/ points to the relevant cgroup. + print_metric "cgroup v1 memory.usage_in_bytes" "$(cat_file /sys/fs/cgroup/memory/memory.usage_in_bytes)" + print_metric "cgroup v1 memory.limit_in_bytes" "$(cat_file /sys/fs/cgroup/memory/memory.limit_in_bytes)" + print_metric "cgroup v1 memory.failcnt" "$(cat_file /sys/fs/cgroup/memory/memory.failcnt)" + print_metric "cgroup v1 memory.max_usage_in_bytes" "$(cat_file /sys/fs/cgroup/memory/memory.max_usage_in_bytes)" + + # Throttling stats from /sys/fs/cgroup/cpu/cpu.stat + if [ -f /sys/fs/cgroup/cpu/cpu.stat ]; then + print_metric "cgroup v1 cpu.nr_throttled" "$( (grep -E "^nr_throttled[[:space:]]+" /sys/fs/cgroup/cpu/cpu.stat | cut -d' ' -f2) 2>/dev/null || echo 'not found')" + print_metric "cgroup v1 cpu.throttled_time_ns" "$( (grep -E "^throttled_time[[:space:]]+" /sys/fs/cgroup/cpu/cpu.stat | cut -d' ' -f2) 2>/dev/null || echo 'not found')" + else + # Print not found for these specific metrics if cpu.stat is missing, to avoid ambiguity + print_metric "cgroup v1 cpu.nr_throttled" "not found (cpu.stat)" + print_metric "cgroup v1 cpu.throttled_time_ns" "not found (cpu.stat)" + fi + # CPU Quota settings from /sys/fs/cgroup/cpu/ + print_metric "cgroup v1 cpu.cfs_period_us" "$(cat_file /sys/fs/cgroup/cpu/cpu.cfs_period_us)" + print_metric "cgroup v1 cpu.cfs_quota_us" "$(cat_file /sys/fs/cgroup/cpu/cpu.cfs_quota_us)" + # 
CPU usage from /sys/fs/cgroup/cpuacct/ (usually same hierarchy as cpu) + print_metric "cgroup v1 cpuacct.usage_ns" "$(cat_file /sys/fs/cgroup/cpuacct/cpuacct.usage)" + print_metric "cgroup v1 cpuacct.usage_user_ns" "$(cat_file /sys/fs/cgroup/cpuacct/cpuacct.usage_user)" + print_metric "cgroup v1 cpuacct.usage_sys_ns" "$(cat_file /sys/fs/cgroup/cpuacct/cpuacct.usage_sys)" + +else + printf "cgroup memory paths not found. Neither cgroup v2 controller file nor cgroup v1 memory directory detected.\n" +fi + diff --git a/.gitlab/check_test_agent_results.sh b/.gitlab/check_test_agent_results.sh new file mode 100755 index 00000000000..cfbc8f098be --- /dev/null +++ b/.gitlab/check_test_agent_results.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +set +e # Disable exiting from testagent response failure +SUMMARY_RESPONSE=$(curl -s -w "\n%{http_code}" -o summary_response.txt "http://${CI_AGENT_HOST}:8126/test/trace_check/summary") +set -e +SUMMARY_RESPONSE_CODE=$(echo "$SUMMARY_RESPONSE" | awk 'END {print $NF}') + +if [[ $SUMMARY_RESPONSE_CODE -eq 200 ]]; then + echo "APM Test Agent is running. (HTTP 200)" +else + echo "APM Test Agent is not running and was not used for testing. No checks failed." + exit 0 +fi + +RESPONSE=$(curl -s -w "\n%{http_code}" -o response.txt "http://${CI_AGENT_HOST}:8126/test/trace_check/failures") +RESPONSE_CODE=$(echo "$RESPONSE" | awk 'END {print $NF}') + +if [[ $RESPONSE_CODE -eq 200 ]]; then + echo "All APM Test Agent Check Traces returned successful! (HTTP 200)" + echo "APM Test Agent Check Traces Summary Results:" + cat summary_response.txt | jq '.' +elif [[ $RESPONSE_CODE -eq 404 ]]; then + echo "Real APM Agent running in place of TestAgent, no checks to validate!" +else + echo "APM Test Agent Check Traces failed with response code: $RESPONSE_CODE" + echo "Failures:" + cat response.txt + echo "APM Test Agent Check Traces Summary Results:" + cat summary_response.txt | jq '.'
+ exit 1 +fi diff --git a/.gitlab/exploration-tests.yml b/.gitlab/exploration-tests.yml index a25106e94d5..5c1e07173e2 100644 --- a/.gitlab/exploration-tests.yml +++ b/.gitlab/exploration-tests.yml @@ -45,7 +45,7 @@ build-exploration-tests-image: - "*_surefire-reports.tar.gz" - "*_debugger-dumps.tar.gz" -exploration-tests-jsoup: +exploration-tests-method-jsoup: needs: [ build ] dependencies: - build @@ -53,9 +53,29 @@ exploration-tests-jsoup: variables: PROJECT: jsoup script: - - ./run-exploration-tests.sh "$PROJECT" "mvn verify" "include_${PROJECT}.txt" "exclude_${PROJECT}.txt" + - ./run-exploration-tests.sh "method" "$PROJECT" "mvn verify" "include_${PROJECT}.txt" "exclude_${PROJECT}.txt" -exploration-tests-jackson-core: +exploration-tests-line-jsoup: + needs: [ build ] + dependencies: + - build + <<: *common-exploration-tests + variables: + PROJECT: jsoup + script: + - ./run-exploration-tests.sh "line" "$PROJECT" "mvn verify" "include_${PROJECT}.txt" "exclude_${PROJECT}.txt" + +exploration-tests-method-jackson-core: + needs: [ build ] + dependencies: + - build + <<: *common-exploration-tests + variables: + PROJECT: jackson-core + script: + - ./run-exploration-tests.sh "method" "$PROJECT" "mvn verify" "include_${PROJECT}.txt" "exclude_${PROJECT}.txt" + +exploration-tests-line-jackson-core: needs: [ build ] dependencies: - build @@ -63,9 +83,19 @@ exploration-tests-jackson-core: variables: PROJECT: jackson-core script: - - ./run-exploration-tests.sh "$PROJECT" "mvn verify" "include_${PROJECT}.txt" "exclude_${PROJECT}.txt" + - ./run-exploration-tests.sh "line" "$PROJECT" "mvn verify" "include_${PROJECT}.txt" "exclude_${PROJECT}.txt" + +exploration-tests-method-jackson-databind: + needs: [ build ] + dependencies: + - build + <<: *common-exploration-tests + variables: + PROJECT: jackson-databind + script: + - ./run-exploration-tests.sh "method" "$PROJECT" "./mvnw verify" "include_${PROJECT}.txt" "exclude_$PROJECT.txt" -exploration-tests-jackson-databind: 
+exploration-tests-line-jackson-databind: needs: [ build ] dependencies: - build @@ -73,4 +103,4 @@ exploration-tests-jackson-databind: variables: PROJECT: jackson-databind script: - - ./run-exploration-tests.sh "$PROJECT" "./mvnw verify" "exclude_$PROJECT.txt" + - ./run-exploration-tests.sh "line" "$PROJECT" "./mvnw verify" "include_${PROJECT}.txt" "exclude_line_$PROJECT.txt" diff --git a/.gitlab/gitlab-utils.sh b/.gitlab/gitlab-utils.sh new file mode 100755 index 00000000000..59640da744b --- /dev/null +++ b/.gitlab/gitlab-utils.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +# From https://docs.gitlab.com/ci/jobs/job_logs/#use-a-script-to-improve-display-of-collapsible-sections +# function for starting the section +function gitlab_section_start () { + local section_title="${1}" + local section_description="${2:-$section_title}" + + echo -e "section_start:`date +%s`:${section_title}[collapsed=true]\r\e[0K${section_description}" +} + +# Function for ending the section +function gitlab_section_end () { + local section_title="${1}" + + echo -e "section_end:`date +%s`:${section_title}\r\e[0K" +} + +# A subset of ansi color/formatting codes https://misc.flogisoft.com/bash/tip_colors_and_formatting +export TEXT_RED="\e[31m" +export TEXT_GREEN="\e[32m" +export TEXT_YELLOW="\e[33m" +export TEXT_BLUE="\e[34m" +export TEXT_MAGENTA="\e[35m" +export TEXT_CYAN="\e[36m" +export TEXT_CLEAR="\e[0m" +export TEXT_BOLD="\e[1m" diff --git a/.gitlab/one-pipeline.locked.yml b/.gitlab/one-pipeline.locked.yml new file mode 100644 index 00000000000..d88d10f06bf --- /dev/null +++ b/.gitlab/one-pipeline.locked.yml @@ -0,0 +1,4 @@ +# DO NOT EDIT THIS FILE MANUALLY +# This file is auto-generated by automation. 
+include: + - remote: https://gitlab-templates.ddbuild.io/libdatadog/one-pipeline/ca/f2050f53c1f5aed62a24e6b406c746e7d593230ce02b5d56d2a2296db763ebf4/one-pipeline.yml diff --git a/benchmark/Dockerfile b/benchmark/Dockerfile index c6bb8e853ed..0279186478a 100644 --- a/benchmark/Dockerfile +++ b/benchmark/Dockerfile @@ -1,14 +1,20 @@ # Petclinic download and compilation stage FROM eclipse-temurin:17-jammy as petclinic +ARG SPRING_PETCLINIC_COMMIT=cefaf55dd124d0635abfe857c3c99a3d3ea62017 + RUN apt-get update \ && apt-get -y install git \ && apt-get -y clean \ && rm -rf /var/lib/apt/lists/* -RUN git clone --depth 1 --branch main --single-branch https://github.com/spring-projects/spring-petclinic.git \ - && cd spring-petclinic \ - && ./mvnw dependency:go-offline +RUN set -eux;\ + git init spring-petclinic;\ + cd spring-petclinic;\ + git remote add origin https://github.com/spring-projects/spring-petclinic.git;\ + git fetch --depth 1 origin ${SPRING_PETCLINIC_COMMIT};\ + git checkout ${SPRING_PETCLINIC_COMMIT};\ + ./mvnw dependency:go-offline RUN cd spring-petclinic \ && ./mvnw package -Dmaven.test.skip=true \ diff --git a/benchmark/benchmarks.sh b/benchmark/benchmarks.sh index fac7c3f3fbb..0b245038afa 100755 --- a/benchmark/benchmarks.sh +++ b/benchmark/benchmarks.sh @@ -33,10 +33,6 @@ if [[ ! 
-f "${TRACER}" ]]; then cd "${SCRIPT_DIR}" fi -# Cleanup previous reports -rm -rf "${REPORTS_DIR}" -mkdir -p "${REPORTS_DIR}" - if [[ "$#" == '0' ]]; then for type in 'startup' 'load' 'dacapo'; do run_benchmarks "$type" diff --git a/benchmark/load/insecure-bank/benchmark.json b/benchmark/load/insecure-bank/benchmark.json deleted file mode 100644 index 44f85867b1e..00000000000 --- a/benchmark/load/insecure-bank/benchmark.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "name": "load_insecure-bank", - "setup": "bash -c \"mkdir -p ${OUTPUT_DIR}/${VARIANT}\"", - "service": "bash -c \"${UTILS_DIR}/run-k6-load-test.sh http://localhost:8080/login ${OUTPUT_DIR}/${VARIANT} 'pkill java'\"", - "run": "bash -c \"java ${JAVA_OPTS} -Xms3G -Xmx3G -jar ${INSECURE_BANK} &> ${OUTPUT_DIR}/${VARIANT}/insecure-bank.log\"", - "timeout": 150, - "iterations": 1, - "variants": { - "${NO_AGENT_VARIANT}": { - "env": { - "VARIANT": "${NO_AGENT_VARIANT}", - "JAVA_OPTS": "" - } - }, - "tracing": { - "env": { - "VARIANT": "tracing", - "JAVA_OPTS": "-javaagent:${TRACER}" - } - }, - "iast": { - "env": { - "VARIANT": "iast", - "JAVA_OPTS": "-javaagent:${TRACER} -Ddd.iast.enabled=true" - } - }, - "iast_GLOBAL": { - "env": { - "VARIANT": "iast_GLOBAL", - "JAVA_OPTS": "-javaagent:${TRACER} -Ddd.iast.enabled=true -Ddd.iast.context.mode=GLOBAL" - } - }, - "iast_FULL": { - "env": { - "VARIANT": "iast_FULL", - "JAVA_OPTS": "-javaagent:${TRACER} -Ddd.iast.enabled=true -Ddd.iast.detection.mode=FULL" - } - }, - "iast_INACTIVE": { - "env": { - "VARIANT": "iast_INACTIVE", - "JAVA_OPTS": "-javaagent:${TRACER} -Ddd.iast.enabled=inactive" - } - }, - "iast_TELEMETRY_OFF": { - "env": { - "VARIANT": "iast_TELEMETRY_OFF", - "JAVA_OPTS": "-javaagent:${TRACER} -Ddd.iast.enabled=true -Ddd.iast.telemetry.verbosity=OFF" - } - }, - "iast_HARDCODED_SECRET_DISABLED": { - "env": { - "VARIANT": "iast_HARDCODED_SECRET_DISABLED", - "JAVA_OPTS": "-javaagent:${TRACER} -Ddd.iast.enabled=true -Ddd.iast.hardcoded-secret.enabled=false" - } - 
} - } -} diff --git a/benchmark/load/insecure-bank/k6.js b/benchmark/load/insecure-bank/k6.js index 993ac6639f7..2dc5ce582c6 100644 --- a/benchmark/load/insecure-bank/k6.js +++ b/benchmark/load/insecure-bank/k6.js @@ -1,18 +1,62 @@ import http from 'k6/http'; import {checkResponse, isOk, isRedirect} from "../../utils/k6.js"; -const baseUrl = 'http://localhost:8080'; +const variants = { + "no_agent": { + "APP_URL": 'http://localhost:8080', + }, + "tracing": { + "APP_URL": 'http://localhost:8081', + }, + "profiling": { + "APP_URL": 'http://localhost:8082', + }, + "iast": { + "APP_URL": 'http://localhost:8083', + }, + "iast_GLOBAL": { + "APP_URL": 'http://localhost:8084', + }, + "iast_FULL": { + "APP_URL": 'http://localhost:8085', + }, +} + +export const options = function (variants) { + let scenarios = {}; + for (const variant of Object.keys(variants)) { + scenarios[`load--insecure-bank--${variant}--warmup`] = { + executor: 'constant-vus', // https://grafana.com/docs/k6/latest/using-k6/scenarios/executors/#all-executors + vus: 5, + duration: '20s', + gracefulStop: '2s', + env: { + "APP_URL": variants[variant]["APP_URL"] + } + }; + + scenarios[`load--insecure-bank--${variant}--high_load`] = { + executor: 'constant-vus', + vus: 5, + startTime: '22s', + duration: '15s', + gracefulStop: '2s', + env: { + "APP_URL": variants[variant]["APP_URL"] + } + }; + } -export const options = { - discardResponseBodies: true, - vus: 5, - iterations: 40000 -}; + return { + discardResponseBodies: true, + scenarios, + } +}(variants); export default function () { // login form - const loginResponse = http.post(`${baseUrl}/login`, { + const loginResponse = http.post(`${__ENV.APP_URL}/login`, { username: 'john', password: 'test' }, { @@ -21,11 +65,11 @@ export default function () { checkResponse(loginResponse, isRedirect); // dashboard - const dashboard = http.get(`${baseUrl}/dashboard`); + const dashboard = http.get(`${__ENV.APP_URL}/dashboard`); checkResponse(dashboard, isOk); // logout - 
const logout = http.get(`${baseUrl}/j_spring_security_logout`, { + const logout = http.get(`${__ENV.APP_URL}/j_spring_security_logout`, { redirects: 0 }); checkResponse(logout, isRedirect); diff --git a/benchmark/load/insecure-bank/start-servers.sh b/benchmark/load/insecure-bank/start-servers.sh new file mode 100755 index 00000000000..4cae95567f2 --- /dev/null +++ b/benchmark/load/insecure-bank/start-servers.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash + +set -e + +start_server() { + local VARIANT=$1 + local JAVA_OPTS=$2 + + if [ -n "$CI_JOB_TOKEN" ]; then + # Inside BP, so we can assume 24 CPU cores available and set CPU affinity + CPU_AFFINITY_APP=$3 + else + CPU_AFFINITY_APP="" + fi + + mkdir -p "${LOGS_DIR}/${VARIANT}" + ${CPU_AFFINITY_APP}java ${JAVA_OPTS} -Xms3G -Xmx3G -jar ${INSECURE_BANK} &> ${LOGS_DIR}/${VARIANT}/insecure-bank.log &PID=$! + echo "${CPU_AFFINITY_APP}java ${JAVA_OPTS} -Xms3G -Xmx3G -jar ${INSECURE_BANK} &> ${LOGS_DIR}/${VARIANT}/insecure-bank.log [PID=$PID]" +} + +start_server "no_agent" "-Dserver.port=8080" "taskset -c 47 " & +start_server "tracing" "-javaagent:${TRACER} -Dserver.port=8081" "taskset -c 46 " & +start_server "profiling" "-javaagent:${TRACER} -Ddd.profiling.enabled=true -Dserver.port=8082" "taskset -c 45 " & +start_server "iast" "-javaagent:${TRACER} -Ddd.iast.enabled=true -Dserver.port=8083" "taskset -c 44 " & +start_server "iast_GLOBAL" "-javaagent:${TRACER} -Ddd.iast.enabled=true -Ddd.iast.context.mode=GLOBAL -Dserver.port=8084" "taskset -c 43 " & +start_server "iast_FULL" "-javaagent:${TRACER} -Ddd.iast.enabled=true -Ddd.iast.detection.mode=FULL -Dserver.port=8085" "taskset -c 42 " & + +wait diff --git a/benchmark/load/petclinic/benchmark.json b/benchmark/load/petclinic/benchmark.json deleted file mode 100644 index 0ae862df717..00000000000 --- a/benchmark/load/petclinic/benchmark.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "name": "load_petclinic", - "setup": "bash -c \"mkdir -p ${OUTPUT_DIR}/${VARIANT}\"", - "service": "bash -c 
\"${UTILS_DIR}/run-k6-load-test.sh http://localhost:8080 ${OUTPUT_DIR}/${VARIANT} 'pkill java'\"", - "run": "bash -c \"java ${JAVA_OPTS} -Xms2G -Xmx2G -jar ${PETCLINIC} &> ${OUTPUT_DIR}/${VARIANT}/petclinic.log\"", - "timeout": 150, - "iterations": 1, - "variants": { - "${NO_AGENT_VARIANT}": { - "env": { - "VARIANT": "${NO_AGENT_VARIANT}", - "JAVA_OPTS": "" - } - }, - "tracing": { - "env": { - "VARIANT": "tracing", - "JAVA_OPTS": "-javaagent:${TRACER}" - } - }, - "profiling": { - "env": { - "VARIANT": "profiling", - "JAVA_OPTS": "-javaagent:${TRACER} -Ddd.profiling.enabled=true" - } - }, - "appsec": { - "env": { - "VARIANT": "appsec", - "JAVA_OPTS": "-javaagent:${TRACER} -Ddd.appsec.enabled=true" - } - }, - "appsec_no_iast": { - "env": { - "VARIANT": "appsec_no_iast", - "JAVA_OPTS": "-javaagent:${TRACER} -Ddd.appsec.enabled=true -Ddd.iast.enabled=false" - } - }, - "iast": { - "env": { - "VARIANT": "iast", - "JAVA_OPTS": "-javaagent:${TRACER} -Ddd.iast.enabled=true" - } - }, - "code_origins": { - "env": { - "VARIANT": "code_origins", - "JAVA_OPTS": "-javaagent:${TRACER} -Ddd.code.origin.for.spans.enabled=true" - } - } - } -} diff --git a/benchmark/load/petclinic/k6.js b/benchmark/load/petclinic/k6.js index 6eb465c3683..a215ad7ea2d 100644 --- a/benchmark/load/petclinic/k6.js +++ b/benchmark/load/petclinic/k6.js @@ -1,17 +1,61 @@ import http from 'k6/http'; import {checkResponse, isOk} from "../../utils/k6.js"; -const baseUrl = 'http://localhost:8080'; +const variants = { + "no_agent": { + "APP_URL": 'http://localhost:8080', + }, + "tracing": { + "APP_URL": 'http://localhost:8081', + }, + "profiling": { + "APP_URL": 'http://localhost:8082', + }, + "appsec": { + "APP_URL": 'http://localhost:8083', + }, + "iast": { + "APP_URL": 'http://localhost:8084', + }, + "code_origins": { + "APP_URL": 'http://localhost:8085', + } +} + +export const options = function (variants) { + const scenarios = {}; + for (const variant of Object.keys(variants)) { + 
scenarios[`load--petclinic--${variant}--warmup`] = { + executor: 'constant-vus', // https://grafana.com/docs/k6/latest/using-k6/scenarios/executors/#all-executors + vus: 5, + duration: '20s', + gracefulStop: '2s', + env: { + "APP_URL": variants[variant]["APP_URL"] + } + }; + + scenarios[`load--petclinic--${variant}--high_load`] = { + executor: 'constant-vus', + vus: 5, + startTime: '22s', + duration: '15s', + gracefulStop: '2s', + env: { + "APP_URL": variants[variant]["APP_URL"] + } + }; + } -export const options = { - discardResponseBodies: true, - vus: 5, - iterations: 80000 -}; + return { + discardResponseBodies: true, + scenarios, + } +}(variants); export default function () { // find owner - const ownersList = http.get(`${baseUrl}/owners?lastName=`); + const ownersList = http.get(`${__ENV.APP_URL}/owners?lastName=`); checkResponse(ownersList, isOk); } diff --git a/benchmark/load/petclinic/start-servers.sh b/benchmark/load/petclinic/start-servers.sh new file mode 100755 index 00000000000..1ebbb4e0418 --- /dev/null +++ b/benchmark/load/petclinic/start-servers.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash + +set -e + +start_server() { + local VARIANT=$1 + local JAVA_OPTS=$2 + + if [ -n "$CI_JOB_TOKEN" ]; then + # Inside BP, so we can assume 24 CPU cores available and set CPU affinity + CPU_AFFINITY_APP=$3 + else + CPU_AFFINITY_APP="" + fi + + mkdir -p "${LOGS_DIR}/${VARIANT}" + ${CPU_AFFINITY_APP}java ${JAVA_OPTS} -Xms2G -Xmx2G -jar ${PETCLINIC} &> ${LOGS_DIR}/${VARIANT}/petclinic.log &PID=$! 
+ echo "${CPU_AFFINITY_APP}java ${JAVA_OPTS} -Xms2G -Xmx2G -jar ${PETCLINIC} &> ${LOGS_DIR}/${VARIANT}/petclinic.log [PID=$!]" +} + +start_server "no_agent" "-Dserver.port=8080" "taskset -c 31-32 " & +start_server "tracing" "-javaagent:${TRACER} -Dserver.port=8081" "taskset -c 33-34 " & +start_server "profiling" "-javaagent:${TRACER} -Ddd.profiling.enabled=true -Dserver.port=8082" "taskset -c 35-36 " & +start_server "appsec" "-javaagent:${TRACER} -Ddd.appsec.enabled=true -Dserver.port=8083" "taskset -c 37-38 " & +start_server "iast" "-javaagent:${TRACER} -Ddd.iast.enabled=true -Dserver.port=8084" "taskset -c 39-40 " & +start_server "code_origins" "-javaagent:${TRACER} -Ddd.code.origin.for.spans.enabled=true -Dserver.port=8085" "taskset -c 41-42 " & + +wait diff --git a/benchmark/load/run.sh b/benchmark/load/run.sh index 432c65d3fd5..d43a28383d5 100755 --- a/benchmark/load/run.sh +++ b/benchmark/load/run.sh @@ -1,5 +1,77 @@ #!/usr/bin/env bash -set -eu + +set -e + +function message() { + echo "$(date +"%T"): $1" +} + +function healthcheck() { + local url=$1 + + while true; do + if [[ $(curl -fso /dev/null -w "%{http_code}" "${url}") = 200 ]]; then + break + fi + done +} + +type=$1 + +if [ -n "$CI_JOB_TOKEN" ]; then + # Inside BP, so we can assume 24 CPU cores on the second socket available and set CPU affinity + export CPU_AFFINITY_K6="taskset -c 24-27 " +else + export CPU_AFFINITY_K6="" +fi source "${UTILS_DIR}/update-java-version.sh" 17 -"${UTILS_DIR}/run-sirun-benchmarks.sh" "$@" + +for app in *; do + if [[ ! 
-d "${app}" ]]; then + continue + fi + + message "${type} benchmark: ${app} started" + + export OUTPUT_DIR="${REPORTS_DIR}/${type}/${app}" + mkdir -p ${OUTPUT_DIR} + + export LOGS_DIR="${ARTIFACTS_DIR}/${type}/${app}" + mkdir -p ${LOGS_DIR} + + # Using profiler variants for healthcheck as they are the slowest + if [ "${app}" == "petclinic" ]; then + HEALTHCHECK_URL=http://localhost:8082 + REPETITIONS_COUNT=5 + elif [ "${app}" == "insecure-bank" ]; then + HEALTHCHECK_URL=http://localhost:8082/login + REPETITIONS_COUNT=2 + else + echo "Unknown app ${app}" + exit 1 + fi + + for i in $(seq 1 $REPETITIONS_COUNT); do + bash -c "${UTILS_DIR}/../${type}/${app}/start-servers.sh" & + + echo "Waiting for serves to start..." + if [ "${app}" == "petclinic" ]; then + for port in $(seq 8080 8085); do + healthcheck http://localhost:$port + done + elif [ "${app}" == "insecure-bank" ]; then + for port in $(seq 8080 8085); do + healthcheck http://localhost:$port/login + done + fi + echo "Servers are up!" + + ( + cd ${app} && + bash -c "${CPU_AFFINITY_K6}${UTILS_DIR}/run-k6-load-test.sh 'pkill java'" + ) + done + + message "${type} benchmark: ${app} finished" +done diff --git a/benchmark/startup/petclinic/benchmark.json b/benchmark/startup/petclinic/benchmark.json index 7b02b2939bc..23713c38469 100644 --- a/benchmark/startup/petclinic/benchmark.json +++ b/benchmark/startup/petclinic/benchmark.json @@ -24,12 +24,6 @@ "JAVA_OPTS": "-Ddd.appsec.enabled=true" } }, - "appsec_no_iast": { - "env": { - "VARIANT": "appsec", - "JAVA_OPTS": "-Ddd.appsec.enabled=true -Ddd.iast.enabled=false" - } - }, "iast": { "env": { "VARIANT": "iast", diff --git a/benchmark/utils/run-k6-load-test.sh b/benchmark/utils/run-k6-load-test.sh index 1d9753dc6e7..d3415f54eef 100755 --- a/benchmark/utils/run-k6-load-test.sh +++ b/benchmark/utils/run-k6-load-test.sh @@ -1,9 +1,7 @@ #!/usr/bin/env bash set -eu -url=$1 -output=$2 -command=$3 +command=$1 exit_code=0 cleanup() { @@ -14,14 +12,10 @@ cleanup() { trap cleanup 
EXIT ERR INT TERM -# wait for the HTTP server to be up -while true; do - if [[ $(curl -fso /dev/null -w "%{http_code}" "${url}") = 200 ]]; then - break - fi -done +echo "Starting k6 load test, logs are recorded into ${LOGS_DIR}/k6.log..." # run the k6 benchmark and store the result as JSON -k6 run k6.js --out "json=${output}/k6_$(date +%s).json" &>>"${output}/k6.log" - +k6 run k6.js --out "json=${OUTPUT_DIR}/k6_$(date +%s).json" > "${LOGS_DIR}/k6.log" 2>&1 exit_code=$? + +echo "k6 load test done !!!" diff --git a/benchmark/utils/update-java-version.sh b/benchmark/utils/update-java-version.sh index 2ea1c352841..3d76603e0ef 100755 --- a/benchmark/utils/update-java-version.sh +++ b/benchmark/utils/update-java-version.sh @@ -1,3 +1,5 @@ readonly target=$1 readonly NEW_PATH=$(echo "${PATH}" | sed -e "s@/usr/lib/jvm/[[:digit:]]\+@/usr/lib/jvm/${target}@g") export PATH="${NEW_PATH}" + +java --version diff --git a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/ShellCommandExecutor.java b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/ShellCommandExecutor.java index 402ef4ecccf..92103b75fd4 100644 --- a/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/ShellCommandExecutor.java +++ b/dd-java-agent/agent-ci-visibility/src/main/java/datadog/trace/civisibility/utils/ShellCommandExecutor.java @@ -161,7 +161,11 @@ private T executeCommand( throw new TimeoutException( "Timeout while waiting for '" + String.join(" ", command) - + "'; " + + "'; in " + + executionFolder + + "\n StdOut: \n" + + IOUtils.readFully(inputStreamConsumer.read(), Charset.defaultCharset()) + + "\n StdErr: \n " + IOUtils.readFully(errorStreamConsumer.read(), Charset.defaultCharset())); } } catch (InterruptedException e) { diff --git a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/git/shallow/git/config b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/git/shallow/git/config index 
67fc9ed5f57..f145b18c5c0 100644 --- a/dd-java-agent/agent-ci-visibility/src/test/resources/ci/git/shallow/git/config +++ b/dd-java-agent/agent-ci-visibility/src/test/resources/ci/git/shallow/git/config @@ -6,7 +6,7 @@ ignorecase = true precomposeunicode = true [remote "origin"] - url = git@github.com:Netflix/zuul.git + url = https://github.com/Netflix/zuul.git fetch = +refs/heads/master:refs/remotes/origin/master [branch "master"] remote = origin diff --git a/dd-java-agent/instrumentation/aws-java-dynamodb-2.0/src/test/groovy/DynamoDbClientTest.groovy b/dd-java-agent/instrumentation/aws-java-dynamodb-2.0/src/test/groovy/DynamoDbClientTest.groovy index 60ca0ad91f9..e42b2a4da5b 100644 --- a/dd-java-agent/instrumentation/aws-java-dynamodb-2.0/src/test/groovy/DynamoDbClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-dynamodb-2.0/src/test/groovy/DynamoDbClientTest.groovy @@ -29,7 +29,7 @@ import spock.lang.Shared import java.time.Duration class DynamoDbClientTest extends AgentTestRunner { - static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack")) + static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack:4.2.0")) .withExposedPorts(4566) .withEnv("SERVICES", "dynamodb") .withReuse(true) diff --git a/dd-java-agent/instrumentation/aws-java-eventbridge-2.0/src/test/groovy/EventBridgeClientTest.groovy b/dd-java-agent/instrumentation/aws-java-eventbridge-2.0/src/test/groovy/EventBridgeClientTest.groovy index 60b2d63ecb6..4a4c445964e 100644 --- a/dd-java-agent/instrumentation/aws-java-eventbridge-2.0/src/test/groovy/EventBridgeClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-eventbridge-2.0/src/test/groovy/EventBridgeClientTest.groovy @@ -21,7 +21,7 @@ import java.time.Duration import java.util.concurrent.CompletableFuture class EventBridgeClientTest extends AgentTestRunner { - static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack")) + 
static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack:4.2.0")) .withExposedPorts(4566) .withEnv("SERVICES", "sns,sqs,events") .withReuse(true) diff --git a/dd-java-agent/instrumentation/aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy b/dd-java-agent/instrumentation/aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy index f5e2c2f9352..b852b75c94d 100644 --- a/dd-java-agent/instrumentation/aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-s3-2.0/src/test/groovy/S3ClientTest.groovy @@ -18,7 +18,7 @@ import spock.lang.Shared import java.time.Duration class S3ClientTest extends AgentTestRunner { - static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack")) + static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack:4.2.0")) .withExposedPorts(4566) .withEnv("SERVICES", "s3") .withReuse(true) @@ -92,8 +92,8 @@ class S3ClientTest extends AgentTestRunner { tag "bucketname", bucketName tag "http.method", "PUT" tag "http.status_code", 200 - tag "http.url", { it.startsWith("http://localhost") && it.contains("/$key") } - tag "peer.hostname", "localhost" + tag "http.url", { it.startsWith("http://" + LOCALSTACK.getHost()) && it.contains("/$key") } + tag "peer.hostname", LOCALSTACK.getHost() tag "peer.port", { it instanceof Integer } tag "span.kind", "client" tag "aws.requestId", { it != null } @@ -151,8 +151,8 @@ class S3ClientTest extends AgentTestRunner { tag "bucketname", bucketName tag "http.method", "PUT" tag "http.status_code", 200 - tag "http.url", { it.startsWith("http://localhost") && it.contains("/$sourceKey") } - tag "peer.hostname", "localhost" + tag "http.url", { it.startsWith("http://" + LOCALSTACK.getHost()) && it.contains("/$sourceKey") } + tag "peer.hostname", LOCALSTACK.getHost() tag "peer.port", { it instanceof Integer } tag "span.kind", "client" tag "aws.requestId", { it != null } @@ -185,8 +185,8 @@ 
class S3ClientTest extends AgentTestRunner { tag "bucketname", bucketName tag "http.method", "PUT" tag "http.status_code", 200 - tag "http.url", { it.startsWith("http://localhost") && it.contains("/$destKey") } - tag "peer.hostname", "localhost" + tag "http.url", { it.startsWith("http://" + LOCALSTACK.getHost()) && it.contains("/$destKey") } + tag "peer.hostname", LOCALSTACK.getHost() tag "peer.port", { it instanceof Integer } tag "span.kind", "client" tag "aws.requestId", { it != null } @@ -276,8 +276,8 @@ class S3ClientTest extends AgentTestRunner { tag "bucketname", bucketName tag "http.method", "POST" tag "http.status_code", 200 - tag "http.url", { it.startsWith("http://localhost") && it.contains("/$key") } - tag "peer.hostname", "localhost" + tag "http.url", { it.startsWith("http://" + LOCALSTACK.getHost()) && it.contains("/$key") } + tag "peer.hostname", LOCALSTACK.getHost() tag "peer.port", { it instanceof Integer } tag "span.kind", "client" tag "aws.requestId", { it != null } @@ -303,8 +303,8 @@ class S3ClientTest extends AgentTestRunner { tag "bucketname", bucketName tag "http.method", "PUT" tag "http.status_code", 200 - tag "http.url", { it.startsWith("http://localhost") && it.contains("/$key") } - tag "peer.hostname", "localhost" + tag "http.url", { it.startsWith("http://" + LOCALSTACK.getHost()) && it.contains("/$key") } + tag "peer.hostname", LOCALSTACK.getHost() tag "peer.port", { it instanceof Integer } tag "span.kind", "client" tag "aws.requestId", { it != null } @@ -330,8 +330,8 @@ class S3ClientTest extends AgentTestRunner { tag "bucketname", bucketName tag "http.method", "PUT" tag "http.status_code", 200 - tag "http.url", { it.startsWith("http://localhost") && it.contains("/$key") } - tag "peer.hostname", "localhost" + tag "http.url", { it.startsWith("http://" + LOCALSTACK.getHost()) && it.contains("/$key") } + tag "peer.hostname", LOCALSTACK.getHost() tag "peer.port", { it instanceof Integer } tag "span.kind", "client" tag "aws.requestId", { it 
!= null } @@ -365,8 +365,8 @@ class S3ClientTest extends AgentTestRunner { tag "bucketname", bucketName tag "http.method", "POST" tag "http.status_code", 200 - tag "http.url", { it.startsWith("http://localhost") && it.contains("/$key") } - tag "peer.hostname", "localhost" + tag "http.url", { it.startsWith("http://" + LOCALSTACK.getHost()) && it.contains("/$key") } + tag "peer.hostname", LOCALSTACK.getHost() tag "peer.port", { it instanceof Integer } tag "span.kind", "client" tag "aws.requestId", { it != null } diff --git a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/payloadTaggingTest/groovy/PayloadTaggingTest.groovy b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/payloadTaggingTest/groovy/PayloadTaggingTest.groovy index 146d0085709..b321fb276ec 100644 --- a/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/payloadTaggingTest/groovy/PayloadTaggingTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sdk-2.2/src/payloadTaggingTest/groovy/PayloadTaggingTest.groovy @@ -26,7 +26,7 @@ abstract class AbstractPayloadTaggingTest extends AgentTestRunner { static final Object NA = {} static final int DEFAULT_PORT = 4566 - static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack")) + static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack:4.2.0")) .withExposedPorts(DEFAULT_PORT) .withEnv("SERVICES", "apigateway,events,s3,sns,sqs,kinesis") .withReuse(true) diff --git a/dd-java-agent/instrumentation/aws-java-sfn-2.0/src/test/groovy/SfnClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sfn-2.0/src/test/groovy/SfnClientTest.groovy index 3298ff48ee9..b90c4b4b131 100644 --- a/dd-java-agent/instrumentation/aws-java-sfn-2.0/src/test/groovy/SfnClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sfn-2.0/src/test/groovy/SfnClientTest.groovy @@ -25,7 +25,7 @@ abstract class SfnClientTest extends VersionedNamingTestBase { @Shared Object endPoint def setupSpec() { - localStack = new 
GenericContainer(DockerImageName.parse("localstack/localstack")) + localStack = new GenericContainer(DockerImageName.parse("localstack/localstack:4.2.0")) .withExposedPorts(4566) .withEnv("SERVICES", "stepfunctions") .withReuse(true) diff --git a/dd-java-agent/instrumentation/aws-java-sns-1.0/src/test/groovy/SnsClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sns-1.0/src/test/groovy/SnsClientTest.groovy index d008f7626f8..1653ecfa586 100644 --- a/dd-java-agent/instrumentation/aws-java-sns-1.0/src/test/groovy/SnsClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sns-1.0/src/test/groovy/SnsClientTest.groovy @@ -27,7 +27,7 @@ import java.time.Duration abstract class SnsClientTest extends VersionedNamingTestBase { - static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack")) + static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack:4.2.0")) .withExposedPorts(4566) // Default LocalStack port .withEnv("SERVICES", "sns,sqs") // Enable SNS and SQS service .withReuse(true) diff --git a/dd-java-agent/instrumentation/aws-java-sns-2.0/src/test/groovy/SnsClientTest.groovy b/dd-java-agent/instrumentation/aws-java-sns-2.0/src/test/groovy/SnsClientTest.groovy index c0066e3595e..3e40aa138dc 100644 --- a/dd-java-agent/instrumentation/aws-java-sns-2.0/src/test/groovy/SnsClientTest.groovy +++ b/dd-java-agent/instrumentation/aws-java-sns-2.0/src/test/groovy/SnsClientTest.groovy @@ -24,7 +24,7 @@ import java.time.Duration import static datadog.trace.agent.test.utils.TraceUtils.basicSpan abstract class SnsClientTest extends VersionedNamingTestBase { - static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack")) + static final LOCALSTACK = new GenericContainer(DockerImageName.parse("localstack/localstack:4.2.0")) .withExposedPorts(4566) // Default LocalStack port .withEnv("SERVICES", "sns,sqs") // Enable SNS and SQS service .withReuse(true) diff --git 
a/dd-java-agent/instrumentation/couchbase/couchbase-3.1/src/test/groovy/CouchbaseClient31Test.groovy b/dd-java-agent/instrumentation/couchbase/couchbase-3.1/src/test/groovy/CouchbaseClient31Test.groovy index 1c76f2a6cba..e37cda84896 100644 --- a/dd-java-agent/instrumentation/couchbase/couchbase-3.1/src/test/groovy/CouchbaseClient31Test.groovy +++ b/dd-java-agent/instrumentation/couchbase/couchbase-3.1/src/test/groovy/CouchbaseClient31Test.groovy @@ -358,7 +358,7 @@ abstract class CouchbaseClient31Test extends VersionedNamingTestBase { it.tag(DDTags.ERROR_TYPE, ex.class.name) it.tag(DDTags.ERROR_STACK, String) } - "$InstrumentationTags.COUCHBASE_SEED_NODES" { it =="localhost" || it == "127.0.0.1" } + "$InstrumentationTags.COUCHBASE_SEED_NODES" { it =="localhost" || it == "127.0.0.1" || it == couchbase.getHost() } if (isLatestDepTest && extraTags != null) { tag('db.system','couchbase') diff --git a/dd-java-agent/instrumentation/couchbase/couchbase-3.2/src/test/groovy/CouchbaseClient32Test.groovy b/dd-java-agent/instrumentation/couchbase/couchbase-3.2/src/test/groovy/CouchbaseClient32Test.groovy index 2a8cd522888..10150d91ef3 100644 --- a/dd-java-agent/instrumentation/couchbase/couchbase-3.2/src/test/groovy/CouchbaseClient32Test.groovy +++ b/dd-java-agent/instrumentation/couchbase/couchbase-3.2/src/test/groovy/CouchbaseClient32Test.groovy @@ -422,7 +422,7 @@ abstract class CouchbaseClient32Test extends VersionedNamingTestBase { "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT "$Tags.DB_TYPE" 'couchbase' 'db.system' 'couchbase' - "$InstrumentationTags.COUCHBASE_SEED_NODES" { it =="localhost" || it == "127.0.0.1" } + "$InstrumentationTags.COUCHBASE_SEED_NODES" { it =="localhost" || it == "127.0.0.1" || it == couchbase.getHost() } if (isErrored) { it.tag(DDTags.ERROR_MSG, { exMessage.length() > 0 && ((String) it).startsWith(exMessage) }) it.tag(DDTags.ERROR_TYPE, ex.class.name) diff --git 
a/dd-java-agent/instrumentation/datastax-cassandra-3.8/src/test/groovy/CassandraClientTest.groovy b/dd-java-agent/instrumentation/datastax-cassandra-3.8/src/test/groovy/CassandraClientTest.groovy index f97bed3a7b2..d4e0ac21065 100644 --- a/dd-java-agent/instrumentation/datastax-cassandra-3.8/src/test/groovy/CassandraClientTest.groovy +++ b/dd-java-agent/instrumentation/datastax-cassandra-3.8/src/test/groovy/CassandraClientTest.groovy @@ -171,7 +171,7 @@ abstract class CassandraClientTest extends VersionedNamingTestBase { tags { "$Tags.COMPONENT" "java-cassandra" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" container.getHost() "$Tags.PEER_HOST_IPV4" "127.0.0.1" "$Tags.PEER_PORT" port "$Tags.DB_TYPE" "cassandra" diff --git a/dd-java-agent/instrumentation/datastax-cassandra-3/src/test/groovy/CassandraClientTest.groovy b/dd-java-agent/instrumentation/datastax-cassandra-3/src/test/groovy/CassandraClientTest.groovy index f97bed3a7b2..d4e0ac21065 100644 --- a/dd-java-agent/instrumentation/datastax-cassandra-3/src/test/groovy/CassandraClientTest.groovy +++ b/dd-java-agent/instrumentation/datastax-cassandra-3/src/test/groovy/CassandraClientTest.groovy @@ -171,7 +171,7 @@ abstract class CassandraClientTest extends VersionedNamingTestBase { tags { "$Tags.COMPONENT" "java-cassandra" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" container.getHost() "$Tags.PEER_HOST_IPV4" "127.0.0.1" "$Tags.PEER_PORT" port "$Tags.DB_TYPE" "cassandra" diff --git a/dd-java-agent/instrumentation/datastax-cassandra-4/src/test/groovy/CassandraClientTest.groovy b/dd-java-agent/instrumentation/datastax-cassandra-4/src/test/groovy/CassandraClientTest.groovy index 35eabe01d9e..ba3aadb8773 100644 --- a/dd-java-agent/instrumentation/datastax-cassandra-4/src/test/groovy/CassandraClientTest.groovy +++ b/dd-java-agent/instrumentation/datastax-cassandra-4/src/test/groovy/CassandraClientTest.groovy @@ 
-47,7 +47,7 @@ abstract class CassandraClientTest extends VersionedNamingTestBase { container = new CassandraContainer("cassandra:4").withStartupTimeout(Duration.ofSeconds(120)) container.start() port = container.getMappedPort(9042) - address = new InetSocketAddress("127.0.0.1", port) + address = new InetSocketAddress(container.getHost(), port) runUnderTrace("setup") { Session session = sessionBuilder().build() @@ -247,12 +247,12 @@ abstract class CassandraClientTest extends VersionedNamingTestBase { tags { "$Tags.COMPONENT" "java-cassandra" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" container.getHost() "$Tags.PEER_HOST_IPV4" "127.0.0.1" "$Tags.PEER_PORT" port "$Tags.DB_TYPE" "cassandra" "$Tags.DB_INSTANCE" keyspace - "$InstrumentationTags.CASSANDRA_CONTACT_POINTS" "127.0.0.1:${port}" + "$InstrumentationTags.CASSANDRA_CONTACT_POINTS" "${container.contactPoint.hostString}:${container.contactPoint.port}" if (throwable != null) { errorTags(throwable) diff --git a/dd-java-agent/instrumentation/exception-profiling/src/test/java/datadog/trace/bootstrap/instrumentation/jfr/exceptions/ExceptionHistogramTest.java b/dd-java-agent/instrumentation/exception-profiling/src/test/java/datadog/trace/bootstrap/instrumentation/jfr/exceptions/ExceptionHistogramTest.java index 1b39118b3e1..79aba1d601b 100644 --- a/dd-java-agent/instrumentation/exception-profiling/src/test/java/datadog/trace/bootstrap/instrumentation/jfr/exceptions/ExceptionHistogramTest.java +++ b/dd-java-agent/instrumentation/exception-profiling/src/test/java/datadog/trace/bootstrap/instrumentation/jfr/exceptions/ExceptionHistogramTest.java @@ -21,6 +21,7 @@ import jdk.jfr.FlightRecorder; import jdk.jfr.Recording; import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.openjdk.jmc.common.item.Aggregators; @@ -60,9 +61,13 @@ public boolean equals(final 
Object obj) { private Recording snapshot; private ExceptionHistogram instance; + @BeforeAll + public static void precheck() { + assumeFalse(Platform.isJ9()); + } + @BeforeEach public void setup() { - assumeFalse(Platform.isJ9()); recording = new Recording(); recording.enable("datadog.ExceptionCount"); recording.start(); diff --git a/dd-java-agent/instrumentation/google-pubsub/src/test/groovy/PubSubTest.groovy b/dd-java-agent/instrumentation/google-pubsub/src/test/groovy/PubSubTest.groovy index fe28d715bc1..cf24e0115bc 100644 --- a/dd-java-agent/instrumentation/google-pubsub/src/test/groovy/PubSubTest.groovy +++ b/dd-java-agent/instrumentation/google-pubsub/src/test/groovy/PubSubTest.groovy @@ -279,7 +279,7 @@ abstract class PubSubTest extends VersionedNamingTestBase { if ({ isDataStreamsEnabled() }) { "$DDTags.PATHWAY_HASH" { String } } - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" emulator.getHost() "$Tags.PEER_HOST_IPV4" "127.0.0.1" "$Tags.PEER_PORT" { Integer } peerServiceFrom(Tags.RPC_SERVICE) diff --git a/dd-java-agent/instrumentation/jdbc/src/test/groovy/RemoteJDBCInstrumentationTest.groovy b/dd-java-agent/instrumentation/jdbc/src/test/groovy/RemoteJDBCInstrumentationTest.groovy index 581384efd86..92a7ceb50d7 100644 --- a/dd-java-agent/instrumentation/jdbc/src/test/groovy/RemoteJDBCInstrumentationTest.groovy +++ b/dd-java-agent/instrumentation/jdbc/src/test/groovy/RemoteJDBCInstrumentationTest.groovy @@ -183,7 +183,7 @@ abstract class RemoteJDBCInstrumentationTest extends VersionedNamingTestBase { } def setupSpec() { - postgres = new PostgreSQLContainer("postgres:11.1") + postgres = new PostgreSQLContainer("postgres:11.2") .withDatabaseName(dbName.get(POSTGRESQL)).withUsername(jdbcUserNames.get(POSTGRESQL)).withPassword(jdbcPasswords.get(POSTGRESQL)) postgres.start() PortUtils.waitForPortToOpen(postgres.getHost(), postgres.getMappedPort(PostgreSQLContainer.POSTGRESQL_PORT), 5, TimeUnit.SECONDS) diff --git 
a/dd-java-agent/instrumentation/mongo/driver-3.1-core-test/src/test/groovy/MongoCore31ClientTest.groovy b/dd-java-agent/instrumentation/mongo/driver-3.1-core-test/src/test/groovy/MongoCore31ClientTest.groovy index df22d41839f..11b8aa8c12b 100644 --- a/dd-java-agent/instrumentation/mongo/driver-3.1-core-test/src/test/groovy/MongoCore31ClientTest.groovy +++ b/dd-java-agent/instrumentation/mongo/driver-3.1-core-test/src/test/groovy/MongoCore31ClientTest.groovy @@ -25,7 +25,7 @@ abstract class MongoCore31ClientTest extends MongoBaseTest { MongoClient client def setup() throws Exception { - client = new MongoClient(new ServerAddress("localhost", port), + client = new MongoClient(new ServerAddress(mongoDbContainer.getHost(), port), MongoClientOptions.builder() .description("some-description") .addCommandListener(new CommandListener() { @@ -70,7 +70,7 @@ abstract class MongoCore31ClientTest extends MongoBaseTest { def "test create collection no description"() { setup: String collectionName = randomCollectionName() - MongoDatabase db = new MongoClient("localhost", port).getDatabase(databaseName) + MongoDatabase db = new MongoClient(mongoDbContainer.getHost(), port).getDatabase(databaseName) when: db.createCollection(collectionName) @@ -218,7 +218,7 @@ abstract class MongoCore31ClientTest extends MongoBaseTest { setup: String collectionName = randomCollectionName() def options = MongoClientOptions.builder().serverSelectionTimeout(10).build() - def client = new MongoClient(new ServerAddress("localhost", UNUSABLE_PORT), [], options) + def client = new MongoClient(new ServerAddress(mongoDbContainer.getHost(), UNUSABLE_PORT), [], options) when: MongoDatabase db = client.getDatabase(databaseName) diff --git a/dd-java-agent/instrumentation/mongo/driver-3.1/src/test/groovy/MongoJava31ClientTest.groovy b/dd-java-agent/instrumentation/mongo/driver-3.1/src/test/groovy/MongoJava31ClientTest.groovy index 4a798cdc1c5..e7eec08b185 100644 --- 
a/dd-java-agent/instrumentation/mongo/driver-3.1/src/test/groovy/MongoJava31ClientTest.groovy +++ b/dd-java-agent/instrumentation/mongo/driver-3.1/src/test/groovy/MongoJava31ClientTest.groovy @@ -25,7 +25,7 @@ abstract class MongoJava31ClientTest extends MongoBaseTest { MongoClient client def setup() throws Exception { - client = new MongoClient(new ServerAddress("localhost", port), + client = new MongoClient(new ServerAddress(mongoDbContainer.getHost(), port), MongoClientOptions.builder() .description("some-description") .addCommandListener(new CommandListener() { @@ -72,7 +72,7 @@ abstract class MongoJava31ClientTest extends MongoBaseTest { def "test create collection no description"() { setup: String collectionName = randomCollectionName() - MongoDatabase db = new MongoClient("localhost", port).getDatabase(databaseName) + MongoDatabase db = new MongoClient(mongoDbContainer.getHost(), port).getDatabase(databaseName) when: db.createCollection(collectionName) @@ -220,7 +220,7 @@ abstract class MongoJava31ClientTest extends MongoBaseTest { setup: String collectionName = randomCollectionName() def options = MongoClientOptions.builder().serverSelectionTimeout(10).build() - def client = new MongoClient(new ServerAddress("localhost", UNUSABLE_PORT), [], options) + def client = new MongoClient(new ServerAddress(mongoDbContainer.getHost(), UNUSABLE_PORT), [], options) when: MongoDatabase db = client.getDatabase(databaseName) diff --git a/dd-java-agent/instrumentation/mongo/driver-3.10-sync-test/src/test/groovy/MongoSyncClientTest.groovy b/dd-java-agent/instrumentation/mongo/driver-3.10-sync-test/src/test/groovy/MongoSyncClientTest.groovy index 74bb354af88..c5ce4bd7806 100644 --- a/dd-java-agent/instrumentation/mongo/driver-3.10-sync-test/src/test/groovy/MongoSyncClientTest.groovy +++ b/dd-java-agent/instrumentation/mongo/driver-3.10-sync-test/src/test/groovy/MongoSyncClientTest.groovy @@ -20,7 +20,7 @@ abstract class MongoSyncClientTest extends MongoBaseTest { MongoClient 
client def setup() throws Exception { - client = MongoClients.create("mongodb://localhost:$port/?appname=some-description") + client = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$port/?appname=some-description") } def cleanup() throws Exception { @@ -52,7 +52,7 @@ abstract class MongoSyncClientTest extends MongoBaseTest { def "test create collection no description"() { setup: String collectionName = randomCollectionName() - MongoDatabase db = MongoClients.create("mongodb://localhost:$port").getDatabase(databaseName) + MongoDatabase db = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$port").getDatabase(databaseName) when: db.createCollection(collectionName) @@ -205,7 +205,7 @@ abstract class MongoSyncClientTest extends MongoBaseTest { def "test client failure"() { setup: String collectionName = randomCollectionName() - def client = MongoClients.create("mongodb://localhost:$UNUSABLE_PORT/?serverselectiontimeoutms=10") + def client = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$UNUSABLE_PORT/?serverselectiontimeoutms=10") when: MongoDatabase db = client.getDatabase(databaseName) diff --git a/dd-java-agent/instrumentation/mongo/driver-3.3-async-test/src/test/groovy/MongoAsyncClientTest.groovy b/dd-java-agent/instrumentation/mongo/driver-3.3-async-test/src/test/groovy/MongoAsyncClientTest.groovy index 032365e83b7..c870e9608f9 100644 --- a/dd-java-agent/instrumentation/mongo/driver-3.3-async-test/src/test/groovy/MongoAsyncClientTest.groovy +++ b/dd-java-agent/instrumentation/mongo/driver-3.3-async-test/src/test/groovy/MongoAsyncClientTest.groovy @@ -27,7 +27,7 @@ abstract class MongoAsyncClientTest extends MongoBaseTest { .clusterSettings( ClusterSettings.builder() .description("some-description") - .applyConnectionString(new ConnectionString("mongodb://localhost:$port")) + .applyConnectionString(new ConnectionString("mongodb://${mongoDbContainer.getHost()}:$port")) .build()) .build()) } @@ -56,7 +56,7 @@ abstract class 
MongoAsyncClientTest extends MongoBaseTest { def "test create collection no description"() { setup: String collectionName = randomCollectionName() - MongoDatabase db = MongoClients.create("mongodb://localhost:$port").getDatabase(databaseName) + MongoDatabase db = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$port").getDatabase(databaseName) when: db.createCollection(collectionName, toCallback {}) diff --git a/dd-java-agent/instrumentation/mongo/driver-3.4/src/test/groovy/MongoJava34ClientTest.groovy b/dd-java-agent/instrumentation/mongo/driver-3.4/src/test/groovy/MongoJava34ClientTest.groovy index 424748dabb0..d9f1b0f7313 100644 --- a/dd-java-agent/instrumentation/mongo/driver-3.4/src/test/groovy/MongoJava34ClientTest.groovy +++ b/dd-java-agent/instrumentation/mongo/driver-3.4/src/test/groovy/MongoJava34ClientTest.groovy @@ -25,7 +25,7 @@ abstract class MongoJava34ClientTest extends MongoBaseTest { MongoClient client def setup() throws Exception { - client = new MongoClient(new ServerAddress("localhost", port), + client = new MongoClient(new ServerAddress(mongoDbContainer.getHost(), port), MongoClientOptions.builder() .description("some-description") .addCommandListener(new CommandListener() { @@ -70,7 +70,7 @@ abstract class MongoJava34ClientTest extends MongoBaseTest { def "test create collection no description"() { setup: String collectionName = randomCollectionName() - MongoDatabase db = new MongoClient("localhost", port).getDatabase(databaseName) + MongoDatabase db = new MongoClient(mongoDbContainer.getHost(), port).getDatabase(databaseName) when: db.createCollection(collectionName) @@ -218,7 +218,7 @@ abstract class MongoJava34ClientTest extends MongoBaseTest { setup: String collectionName = randomCollectionName() def options = MongoClientOptions.builder().serverSelectionTimeout(10).build() - def client = new MongoClient(new ServerAddress("localhost", UNUSABLE_PORT), [], options) + def client = new MongoClient(new 
ServerAddress(mongoDbContainer.getHost(), UNUSABLE_PORT), [], options) when: MongoDatabase db = client.getDatabase(databaseName) diff --git a/dd-java-agent/instrumentation/mongo/driver-3.7-core-test/src/test/groovy/MongoCore37ClientTest.groovy b/dd-java-agent/instrumentation/mongo/driver-3.7-core-test/src/test/groovy/MongoCore37ClientTest.groovy index 6d424fca2a4..7066defce6e 100644 --- a/dd-java-agent/instrumentation/mongo/driver-3.7-core-test/src/test/groovy/MongoCore37ClientTest.groovy +++ b/dd-java-agent/instrumentation/mongo/driver-3.7-core-test/src/test/groovy/MongoCore37ClientTest.groovy @@ -20,7 +20,7 @@ abstract class MongoCore37ClientTest extends MongoBaseTest { MongoClient client def setup() throws Exception { - client = MongoClients.create("mongodb://localhost:$port/?appname=some-instance") + client = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$port/?appname=some-instance") } def cleanup() throws Exception { @@ -52,7 +52,7 @@ abstract class MongoCore37ClientTest extends MongoBaseTest { def "test create collection no description"() { setup: String collectionName = randomCollectionName() - MongoDatabase db = MongoClients.create("mongodb://localhost:$port").getDatabase(databaseName) + MongoDatabase db = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$port").getDatabase(databaseName) when: db.createCollection(collectionName) @@ -205,7 +205,7 @@ abstract class MongoCore37ClientTest extends MongoBaseTest { def "test client failure"() { setup: String collectionName = randomCollectionName() - def client = MongoClients.create("mongodb://localhost:$UNUSABLE_PORT/?serverselectiontimeoutms=10") + def client = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$UNUSABLE_PORT/?serverselectiontimeoutms=10") when: MongoDatabase db = client.getDatabase(databaseName) diff --git a/dd-java-agent/instrumentation/mongo/driver-4.0/src/test/groovy/Mongo4ClientTest.groovy 
b/dd-java-agent/instrumentation/mongo/driver-4.0/src/test/groovy/Mongo4ClientTest.groovy index fa4de78c9a9..1757b1b8235 100644 --- a/dd-java-agent/instrumentation/mongo/driver-4.0/src/test/groovy/Mongo4ClientTest.groovy +++ b/dd-java-agent/instrumentation/mongo/driver-4.0/src/test/groovy/Mongo4ClientTest.groovy @@ -22,7 +22,7 @@ abstract class Mongo4ClientTest extends MongoBaseTest { MongoClient client def setup() throws Exception { - client = MongoClients.create("mongodb://localhost:$port/?appname=some-description") + client = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$port/?appname=some-description") } def cleanup() throws Exception { @@ -64,7 +64,7 @@ abstract class Mongo4ClientTest extends MongoBaseTest { def "test create collection no description"() { setup: String collectionName = randomCollectionName() - MongoDatabase db = MongoClients.create("mongodb://localhost:$port").getDatabase(databaseName) + MongoDatabase db = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$port").getDatabase(databaseName) when: db.createCollection(collectionName) @@ -217,7 +217,7 @@ abstract class Mongo4ClientTest extends MongoBaseTest { def "test client failure"() { setup: String collectionName = randomCollectionName() - def client = MongoClients.create("mongodb://localhost:$UNUSABLE_PORT/?serverselectiontimeoutms=10") + def client = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$UNUSABLE_PORT/?serverselectiontimeoutms=10") when: MongoDatabase db = client.getDatabase(databaseName) diff --git a/dd-java-agent/instrumentation/mongo/driver-4.0/src/test/groovy/MongoReactiveClientTest.groovy b/dd-java-agent/instrumentation/mongo/driver-4.0/src/test/groovy/MongoReactiveClientTest.groovy index d47f350c664..6211c2de1b3 100644 --- a/dd-java-agent/instrumentation/mongo/driver-4.0/src/test/groovy/MongoReactiveClientTest.groovy +++ b/dd-java-agent/instrumentation/mongo/driver-4.0/src/test/groovy/MongoReactiveClientTest.groovy @@ -27,7 +27,7 @@ 
abstract class MongoReactiveClientTest extends MongoBaseTest { MongoClient client def setup() throws Exception { - client = MongoClients.create("mongodb://localhost:$port/?appname=some-description") + client = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$port/?appname=some-description") } def cleanup() throws Exception { @@ -117,7 +117,7 @@ abstract class MongoReactiveClientTest extends MongoBaseTest { def "test create collection no description"() { setup: String collectionName = randomCollectionName() - MongoDatabase db = MongoClients.create("mongodb://localhost:$port").getDatabase(databaseName) + MongoDatabase db = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$port").getDatabase(databaseName) when: db.createCollection(collectionName).subscribe(toSubscriber {}) @@ -133,7 +133,7 @@ abstract class MongoReactiveClientTest extends MongoBaseTest { def "test create collection no description with parent"() { setup: String collectionName = randomCollectionName() - MongoDatabase db = MongoClients.create("mongodb://localhost:$port").getDatabase(databaseName) + MongoDatabase db = MongoClients.create("mongodb://${mongoDbContainer.getHost()}:$port").getDatabase(databaseName) when: runUnderTrace("parent") { diff --git a/dd-java-agent/instrumentation/mongo/src/test/groovy/MongoBaseTest.groovy b/dd-java-agent/instrumentation/mongo/src/test/groovy/MongoBaseTest.groovy index 2421b63153d..9d6ae857165 100644 --- a/dd-java-agent/instrumentation/mongo/src/test/groovy/MongoBaseTest.groovy +++ b/dd-java-agent/instrumentation/mongo/src/test/groovy/MongoBaseTest.groovy @@ -81,7 +81,7 @@ abstract class MongoBaseTest extends VersionedNamingTestBase { tags { "$Tags.COMPONENT" "java-mongo" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" mongoDbContainer.getHost() "$Tags.PEER_PORT" port "$Tags.DB_TYPE" dbType "$Tags.DB_INSTANCE" instance diff --git a/dd-java-agent/instrumentation/opensearch/rest/build.gradle 
b/dd-java-agent/instrumentation/opensearch/rest/build.gradle index 52b03d4655b..0ae2a557128 100644 --- a/dd-java-agent/instrumentation/opensearch/rest/build.gradle +++ b/dd-java-agent/instrumentation/opensearch/rest/build.gradle @@ -3,9 +3,8 @@ muzzle { pass { group = "org.opensearch.client" module = "opensearch-rest-client" - versions = "[1,)" + versions = "[1,2)" javaVersion = '11' - assertInverse = true } } diff --git a/dd-java-agent/instrumentation/redisson/redisson-2.0.0/src/test/groovy/RedissonClientTest.groovy b/dd-java-agent/instrumentation/redisson/redisson-2.0.0/src/test/groovy/RedissonClientTest.groovy index abf5d5bb63f..f112598c75e 100644 --- a/dd-java-agent/instrumentation/redisson/redisson-2.0.0/src/test/groovy/RedissonClientTest.groovy +++ b/dd-java-agent/instrumentation/redisson/redisson-2.0.0/src/test/groovy/RedissonClientTest.groovy @@ -1,3 +1,5 @@ +import org.testcontainers.utility.DockerImageName + import static datadog.trace.api.config.TraceInstrumentationConfig.DB_CLIENT_HOST_SPLIT_BY_INSTANCE import com.redis.testcontainers.RedisContainer @@ -18,7 +20,7 @@ import spock.lang.Shared abstract class RedissonClientTest extends VersionedNamingTestBase { @Shared - RedisServer redisServer = new RedisContainer(RedisContainer.DEFAULT_IMAGE_NAME).waitingFor(Wait.forListeningPort()) + RedisServer redisServer = new RedisContainer(DockerImageName.parse("redis:6.2.6")).waitingFor(Wait.forListeningPort()) @Shared Config config = new Config() @@ -128,7 +130,7 @@ abstract class RedissonClientTest extends VersionedNamingTestBase { "$Tags.COMPONENT" "redis-command" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT "$Tags.DB_TYPE" "redis" - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" redisServer.getHost() "$Tags.PEER_HOST_IPV4" "127.0.0.1" "$Tags.PEER_PORT" redisServer.firstMappedPort peerServiceFrom(Tags.PEER_HOSTNAME) @@ -324,7 +326,7 @@ abstract class RedissonClientTest extends VersionedNamingTestBase { "$Tags.COMPONENT" "redis-command" "$Tags.SPAN_KIND" 
Tags.SPAN_KIND_CLIENT "$Tags.DB_TYPE" "redis" - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" redisServer.getHost() "$Tags.PEER_HOST_IPV4" "127.0.0.1" "$Tags.PEER_PORT" redisServer.firstMappedPort peerServiceFrom(Tags.PEER_HOSTNAME) diff --git a/dd-java-agent/instrumentation/redisson/redisson-2.3.0/src/test/groovy/RedissonClientTest.groovy b/dd-java-agent/instrumentation/redisson/redisson-2.3.0/src/test/groovy/RedissonClientTest.groovy index 2345f6de2cd..80fb3469f9d 100644 --- a/dd-java-agent/instrumentation/redisson/redisson-2.3.0/src/test/groovy/RedissonClientTest.groovy +++ b/dd-java-agent/instrumentation/redisson/redisson-2.3.0/src/test/groovy/RedissonClientTest.groovy @@ -1,3 +1,5 @@ +import org.testcontainers.utility.DockerImageName + import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace import static datadog.trace.api.config.TraceInstrumentationConfig.DB_CLIENT_HOST_SPLIT_BY_INSTANCE import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activeSpan @@ -17,7 +19,7 @@ import spock.lang.Shared abstract class RedissonClientTest extends VersionedNamingTestBase { @Shared - RedisServer redisServer = new RedisContainer(RedisContainer.DEFAULT_IMAGE_NAME).waitingFor(Wait.forListeningPort()) + RedisServer redisServer = new RedisContainer(DockerImageName.parse("redis:6.2.6")).waitingFor(Wait.forListeningPort()) @Shared Config config = new Config() @@ -127,7 +129,7 @@ abstract class RedissonClientTest extends VersionedNamingTestBase { "$Tags.COMPONENT" "redis-command" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT "$Tags.DB_TYPE" "redis" - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" redisServer.getHost() "$Tags.PEER_HOST_IPV4" "127.0.0.1" "$Tags.PEER_PORT" redisServer.firstMappedPort peerServiceFrom(Tags.PEER_HOSTNAME) @@ -323,7 +325,7 @@ abstract class RedissonClientTest extends VersionedNamingTestBase { "$Tags.COMPONENT" "redis-command" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT "$Tags.DB_TYPE" "redis" - 
"$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" redisServer.getHost() "$Tags.PEER_HOST_IPV4" "127.0.0.1" "$Tags.PEER_PORT" redisServer.firstMappedPort peerServiceFrom(Tags.PEER_HOSTNAME) diff --git a/dd-java-agent/instrumentation/redisson/redisson-3.10.3/src/test/groovy/RedissonClientTest.groovy b/dd-java-agent/instrumentation/redisson/redisson-3.10.3/src/test/groovy/RedissonClientTest.groovy index e215da16ffb..db253dce65a 100644 --- a/dd-java-agent/instrumentation/redisson/redisson-3.10.3/src/test/groovy/RedissonClientTest.groovy +++ b/dd-java-agent/instrumentation/redisson/redisson-3.10.3/src/test/groovy/RedissonClientTest.groovy @@ -1,3 +1,5 @@ +import org.testcontainers.utility.DockerImageName + import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace import static datadog.trace.api.config.TraceInstrumentationConfig.DB_CLIENT_HOST_SPLIT_BY_INSTANCE import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activeSpan @@ -17,7 +19,7 @@ import spock.lang.Shared abstract class RedissonClientTest extends VersionedNamingTestBase { @Shared - RedisServer redisServer = new RedisContainer(RedisContainer.DEFAULT_IMAGE_NAME).waitingFor(Wait.forListeningPort()) + RedisServer redisServer = new RedisContainer(DockerImageName.parse("redis:6.2.6")).waitingFor(Wait.forListeningPort()) @Shared Config config = new Config() @@ -120,7 +122,7 @@ abstract class RedissonClientTest extends VersionedNamingTestBase { "$Tags.COMPONENT" "redis-command" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT "$Tags.DB_TYPE" "redis" - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" redisServer.getHost() "$Tags.PEER_HOST_IPV4" "127.0.0.1" "$Tags.PEER_PORT" redisServer.firstMappedPort peerServiceFrom(Tags.PEER_HOSTNAME) @@ -316,7 +318,7 @@ abstract class RedissonClientTest extends VersionedNamingTestBase { "$Tags.COMPONENT" "redis-command" "$Tags.SPAN_KIND" Tags.SPAN_KIND_CLIENT "$Tags.DB_TYPE" "redis" - "$Tags.PEER_HOSTNAME" "localhost" + "$Tags.PEER_HOSTNAME" 
redisServer.getHost() "$Tags.PEER_HOST_IPV4" "127.0.0.1" "$Tags.PEER_PORT" redisServer.firstMappedPort peerServiceFrom(Tags.PEER_HOSTNAME) diff --git a/dd-java-agent/instrumentation/spark-executor/build.gradle b/dd-java-agent/instrumentation/spark-executor/build.gradle index 4525dafc370..800361782c7 100644 --- a/dd-java-agent/instrumentation/spark-executor/build.gradle +++ b/dd-java-agent/instrumentation/spark-executor/build.gradle @@ -37,9 +37,10 @@ dependencies { baseTestImplementation group: 'org.apache.spark', name: "spark-core_2.12", version: "2.4.0" baseTestImplementation group: 'org.apache.spark', name: "spark-sql_2.12", version: "2.4.0" - latest212DepTestImplementation group: 'org.apache.spark', name: "spark-core_2.12", version: '3.+' - latest212DepTestImplementation group: 'org.apache.spark', name: "spark-sql_2.12", version: '3.+' + // FIXME: 3.6.0 seems missing from central + latest212DepTestImplementation group: 'org.apache.spark', name: "spark-core_2.12", version: '3.5.5' + latest212DepTestImplementation group: 'org.apache.spark', name: "spark-sql_2.12", version: '3.5.5' - latest213DepTestImplementation group: 'org.apache.spark', name: "spark-core_2.13", version: '3.+' - latest213DepTestImplementation group: 'org.apache.spark', name: "spark-sql_2.13", version: '3.+' + latest213DepTestImplementation group: 'org.apache.spark', name: "spark-core_2.13", version: '3.5.5' + latest213DepTestImplementation group: 'org.apache.spark', name: "spark-sql_2.13", version: '3.5.5' } diff --git a/dd-java-agent/instrumentation/spark/spark_2.13/build.gradle b/dd-java-agent/instrumentation/spark/spark_2.13/build.gradle index 90eaec596ff..3135412fa60 100644 --- a/dd-java-agent/instrumentation/spark/spark_2.13/build.gradle +++ b/dd-java-agent/instrumentation/spark/spark_2.13/build.gradle @@ -10,8 +10,13 @@ muzzle { pass { group = "org.apache.spark" module = "spark-sql_$scalaVersion" - versions = "[$sparkVersion,)" - assertInverse = true + versions = "[$sparkVersion,4.0.0)" + 
} + pass { + group = "org.apache.spark" + module = "spark-sql_$scalaVersion" + versions = "[4.0.0,)" + javaVersion = 17 } } @@ -47,9 +52,10 @@ dependencies { test_spark32Implementation group: 'org.apache.spark', name: "spark-yarn_$scalaVersion", version: "3.2.4" // FIXME: Currently not working on Spark 4.0.0 preview releases. - latestDepTestImplementation group: 'org.apache.spark', name: "spark-core_$scalaVersion", version: '3.+' - latestDepTestImplementation group: 'org.apache.spark', name: "spark-sql_$scalaVersion", version: '3.+' - latestDepTestImplementation group: 'org.apache.spark', name: "spark-yarn_$scalaVersion", version: '3.+' + // FIXME: 3.6.0 seems missing from central + latestDepTestImplementation group: 'org.apache.spark', name: "spark-core_$scalaVersion", version: '3.5.5' + latestDepTestImplementation group: 'org.apache.spark', name: "spark-sql_$scalaVersion", version: '3.5.5' + latestDepTestImplementation group: 'org.apache.spark', name: "spark-yarn_$scalaVersion", version: '3.5.5' } tasks.named("test").configure { diff --git a/dd-java-agent/instrumentation/spring-webmvc-3.1/src/latestDepTest/groovy/test/boot/SpringBootServer.groovy b/dd-java-agent/instrumentation/spring-webmvc-3.1/src/latestDepTest/groovy/test/boot/SpringBootServer.groovy index 32793911bbf..ed3e95d7abf 100644 --- a/dd-java-agent/instrumentation/spring-webmvc-3.1/src/latestDepTest/groovy/test/boot/SpringBootServer.groovy +++ b/dd-java-agent/instrumentation/spring-webmvc-3.1/src/latestDepTest/groovy/test/boot/SpringBootServer.groovy @@ -22,7 +22,7 @@ class SpringBootServer implements WebsocketServer { @Override void start() { - app.setDefaultProperties(["server.port": 0, "server.servlet.context-path": "/$servletContext"]) + app.setDefaultProperties(["server.port": 0, "server.servlet.context-path": "/$servletContext", "server.forward-headers-strategy": "NONE"]) context = app.run() as ServletWebServerApplicationContext port = context.getWebServer().getPort() try { diff --git 
a/dd-java-agent/instrumentation/spring-webmvc-3.1/src/test/groovy/test/boot/SpringBootServer.groovy b/dd-java-agent/instrumentation/spring-webmvc-3.1/src/test/groovy/test/boot/SpringBootServer.groovy index bc875bb70d5..2befeef9e7f 100644 --- a/dd-java-agent/instrumentation/spring-webmvc-3.1/src/test/groovy/test/boot/SpringBootServer.groovy +++ b/dd-java-agent/instrumentation/spring-webmvc-3.1/src/test/groovy/test/boot/SpringBootServer.groovy @@ -23,7 +23,7 @@ class SpringBootServer implements WebsocketServer { @Override void start() { - app.setDefaultProperties(["server.port": 0, "server.context-path": "/$servletContext"]) + app.setDefaultProperties(["server.port": 0, "server.context-path": "/$servletContext", "server.forward-headers-strategy": "NONE"]) context = app.run() as EmbeddedWebApplicationContext port = context.embeddedServletContainer.port try { diff --git a/dd-java-agent/instrumentation/spring-webmvc-6.0/src/test/groovy/datadog/trace/instrumentation/springweb6/boot/SpringBootBasedTest.groovy b/dd-java-agent/instrumentation/spring-webmvc-6.0/src/test/groovy/datadog/trace/instrumentation/springweb6/boot/SpringBootBasedTest.groovy index 7700fac82ef..4c97b185c40 100644 --- a/dd-java-agent/instrumentation/spring-webmvc-6.0/src/test/groovy/datadog/trace/instrumentation/springweb6/boot/SpringBootBasedTest.groovy +++ b/dd-java-agent/instrumentation/spring-webmvc-6.0/src/test/groovy/datadog/trace/instrumentation/springweb6/boot/SpringBootBasedTest.groovy @@ -72,7 +72,8 @@ class SpringBootBasedTest extends HttpServerTest void start() { app.setDefaultProperties(["server.port": 0, "server.context-path": "/$servletContext", "spring.mvc.throw-exception-if-no-handler-found": false, - "spring.web.resources.add-mappings" : false]) + "spring.web.resources.add-mappings" : false, + "server.forward-headers-strategy": "NONE"]) context = app.run() port = (context as ServletWebServerApplicationContext).webServer.port try { diff --git 
a/dd-java-agent/instrumentation/spymemcached-2.10/src/test/groovy/datadog/trace/instrumentation/spymemcached/SpymemcachedTest.groovy b/dd-java-agent/instrumentation/spymemcached-2.10/src/test/groovy/datadog/trace/instrumentation/spymemcached/SpymemcachedTest.groovy index 7357a9749fb..e55405e7e47 100644 --- a/dd-java-agent/instrumentation/spymemcached-2.10/src/test/groovy/datadog/trace/instrumentation/spymemcached/SpymemcachedTest.groovy +++ b/dd-java-agent/instrumentation/spymemcached-2.10/src/test/groovy/datadog/trace/instrumentation/spymemcached/SpymemcachedTest.groovy @@ -56,7 +56,7 @@ abstract class SpymemcachedTest extends VersionedNamingTestBase { } def setupSpec() { - memcachedContainer = new GenericContainer('memcached:1.6.14-alpine') + memcachedContainer = new GenericContainer('library/memcached:1.6.14-alpine') .withExposedPorts(defaultMemcachedPort) .withStartupTimeout(Duration.ofSeconds(120)) memcachedContainer.start() diff --git a/dd-java-agent/instrumentation/vertx-mysql-client-3.9/build.gradle b/dd-java-agent/instrumentation/vertx-mysql-client-3.9/build.gradle index 0a8136a7ea0..14071efc063 100644 --- a/dd-java-agent/instrumentation/vertx-mysql-client-3.9/build.gradle +++ b/dd-java-agent/instrumentation/vertx-mysql-client-3.9/build.gradle @@ -6,6 +6,7 @@ muzzle { group = 'io.vertx' module = 'vertx-mysql-client' versions = '[3.9.0,4)' + javaVersion = '11' assertInverse = true } } diff --git a/dd-java-agent/instrumentation/vertx-mysql-client-4.0/build.gradle b/dd-java-agent/instrumentation/vertx-mysql-client-4.0/build.gradle index 2d7f37a0c7a..68f27b187b5 100644 --- a/dd-java-agent/instrumentation/vertx-mysql-client-4.0/build.gradle +++ b/dd-java-agent/instrumentation/vertx-mysql-client-4.0/build.gradle @@ -5,7 +5,7 @@ muzzle { pass { group = 'io.vertx' module = 'vertx-mysql-client' - versions = '[4.0.0,5.0.0)' + versions = '[4.0.0,)' assertInverse = true } } diff --git a/dd-java-agent/instrumentation/vertx-mysql-client-4.4.2/build.gradle 
b/dd-java-agent/instrumentation/vertx-mysql-client-4.4.2/build.gradle index 199c43a6828..78d8ae4d01a 100644 --- a/dd-java-agent/instrumentation/vertx-mysql-client-4.4.2/build.gradle +++ b/dd-java-agent/instrumentation/vertx-mysql-client-4.4.2/build.gradle @@ -5,7 +5,7 @@ muzzle { pass { group = 'io.vertx' module = 'vertx-mysql-client' - versions = '[4.4.2,4.6.0)' + versions = '[4.4.2,)' assertInverse = true } } diff --git a/dd-java-agent/instrumentation/vertx-pg-client-4.0/build.gradle b/dd-java-agent/instrumentation/vertx-pg-client-4.0/build.gradle index cfa4eba8fa5..3b887b4a269 100644 --- a/dd-java-agent/instrumentation/vertx-pg-client-4.0/build.gradle +++ b/dd-java-agent/instrumentation/vertx-pg-client-4.0/build.gradle @@ -8,7 +8,13 @@ muzzle { // 4.1.1 is the first version where PgConnectionFactory is public // see https://github.com/eclipse-vertx/vertx-sql-client/pull/986 versions = '[4.1.1,5)' - assertInverse = true + } + fail { + group = 'io.vertx' + module = 'vertx-pg-client' + // 4.1.1 is the first version where PgConnectionFactory is public + // see https://github.com/eclipse-vertx/vertx-sql-client/pull/986 + versions = '(,4.1.1)' } } diff --git a/dd-java-agent/instrumentation/vertx-pg-client-4.4.2/build.gradle b/dd-java-agent/instrumentation/vertx-pg-client-4.4.2/build.gradle index fa167b7090d..fdc8fe1e71c 100644 --- a/dd-java-agent/instrumentation/vertx-pg-client-4.4.2/build.gradle +++ b/dd-java-agent/instrumentation/vertx-pg-client-4.4.2/build.gradle @@ -4,7 +4,7 @@ muzzle { pass { group = 'io.vertx' module = 'vertx-pg-client' - versions = '[4.4.2,4.6.0)' + versions = '[4.4.2,)' assertInverse = true } } diff --git a/dd-java-agent/instrumentation/vertx-redis-client-3.9/build.gradle b/dd-java-agent/instrumentation/vertx-redis-client-3.9/build.gradle index 62c07d97b7f..e058b993307 100644 --- a/dd-java-agent/instrumentation/vertx-redis-client-3.9/build.gradle +++ b/dd-java-agent/instrumentation/vertx-redis-client-3.9/build.gradle @@ -3,6 +3,7 @@ muzzle { 
group = 'io.vertx' module = 'vertx-redis-client' versions = '[3.9.0,)' + javaVersion = '11' assertInverse = true } } diff --git a/dd-java-agent/instrumentation/vertx-redis-client-3.9/src/test/groovy/VertxRedisTestBase.groovy b/dd-java-agent/instrumentation/vertx-redis-client-3.9/src/test/groovy/VertxRedisTestBase.groovy index d4c8151e9ee..97a1c25959f 100644 --- a/dd-java-agent/instrumentation/vertx-redis-client-3.9/src/test/groovy/VertxRedisTestBase.groovy +++ b/dd-java-agent/instrumentation/vertx-redis-client-3.9/src/test/groovy/VertxRedisTestBase.groovy @@ -1,3 +1,5 @@ +import org.testcontainers.utility.DockerImageName + import static datadog.trace.agent.test.utils.TraceUtils.basicSpan import static datadog.trace.agent.test.utils.TraceUtils.runUnderTrace import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activeSpan @@ -33,7 +35,7 @@ abstract class VertxRedisTestBase extends VersionedNamingTestBase { @AutoCleanup(value = "stop") @Shared - def redisServer = new RedisContainer(RedisContainer.DEFAULT_IMAGE_NAME.withTag(RedisContainer.DEFAULT_TAG)) + def redisServer = new RedisContainer(DockerImageName.parse("redis:6.2.6")) .waitingFor(Wait.forListeningPort()) @Shared @@ -136,7 +138,7 @@ abstract class VertxRedisTestBase extends VersionedNamingTestBase { "$Tags.DB_TYPE" "redis" // FIXME: in some cases the connection is not extracted. 
Better to skip this test than mark the whole test as flaky "$Tags.PEER_PORT" { it == null || it == port } - "$Tags.PEER_HOSTNAME" { it == null || it == "127.0.0.1" || it == "localhost" } + "$Tags.PEER_HOSTNAME" { it == null || it == "127.0.0.1" || it == "localhost" || it == redisServer.getHost() } if (tag(Tags.PEER_HOSTNAME) != null) { peerServiceFrom(Tags.PEER_HOSTNAME) defaultTags() diff --git a/dd-java-agent/instrumentation/vertx-sql-client-3.9/build.gradle b/dd-java-agent/instrumentation/vertx-sql-client-3.9/build.gradle index b0da108ffc6..9dfcd4d305c 100644 --- a/dd-java-agent/instrumentation/vertx-sql-client-3.9/build.gradle +++ b/dd-java-agent/instrumentation/vertx-sql-client-3.9/build.gradle @@ -5,6 +5,7 @@ muzzle { group = "io.vertx" module = "vertx-sql-client" versions = "[3.9.0,)" + javaVersion = "11" } } diff --git a/dd-java-agent/instrumentation/vertx-web-3.9/build.gradle b/dd-java-agent/instrumentation/vertx-web-3.9/build.gradle index fb2b97a06b8..5861f0980eb 100644 --- a/dd-java-agent/instrumentation/vertx-web-3.9/build.gradle +++ b/dd-java-agent/instrumentation/vertx-web-3.9/build.gradle @@ -10,7 +10,8 @@ muzzle { pass { group = 'io.vertx' module = "vertx-web" - versions = "[3.8.1,5)" + versions = "[3.8.1,)" // it won't fail on 5+ since won't apply + javaVersion = "11" assertInverse = true } } diff --git a/dd-java-agent/instrumentation/vertx-web-4.0/build.gradle b/dd-java-agent/instrumentation/vertx-web-4.0/build.gradle index b18d87758bb..0b0ebe88d4f 100644 --- a/dd-java-agent/instrumentation/vertx-web-4.0/build.gradle +++ b/dd-java-agent/instrumentation/vertx-web-4.0/build.gradle @@ -16,7 +16,11 @@ muzzle { group = 'io.vertx' module = "vertx-web" versions = "[4.0.0,5)" - assertInverse = true + } + fail { + group = 'io.vertx' + module = "vertx-web" + versions = "(,4.0.0)" } } diff --git a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/AgentTestRunner.groovy 
b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/AgentTestRunner.groovy index 547431d7ace..8c70e25d876 100644 --- a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/AgentTestRunner.groovy +++ b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/AgentTestRunner.groovy @@ -368,8 +368,13 @@ abstract class AgentTestRunner extends DDSpecification implements AgentBuilder.L TEST_WRITER = new ListWriter() if (isTestAgentEnabled()) { + String agentHost = System.getenv("CI_AGENT_HOST") + if (agentHost == null) { + agentHost = DEFAULT_AGENT_HOST + } + // emit traces to the APM Test-Agent for Cross-Tracer Testing Trace Checks - HttpUrl agentUrl = HttpUrl.get("http://" + DEFAULT_AGENT_HOST + ":" + DEFAULT_TRACE_AGENT_PORT) + HttpUrl agentUrl = HttpUrl.get("http://" + agentHost + ":" + DEFAULT_TRACE_AGENT_PORT) OkHttpClient client = buildHttpClient(agentUrl, null, null, TimeUnit.SECONDS.toMillis(DEFAULT_AGENT_TIMEOUT)) DDAgentFeaturesDiscovery featureDiscovery = new DDAgentFeaturesDiscovery(client, Monitoring.DISABLED, agentUrl, Config.get().isTraceAgentV05Enabled(), Config.get().isTracerMetricsEnabled()) TEST_AGENT_API = new DDAgentApi(client, agentUrl, featureDiscovery, Monitoring.DISABLED, Config.get().isTracerMetricsEnabled()) diff --git a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy index d13e9cc2544..97c1ef36b83 100644 --- a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy +++ b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/base/HttpServerTest.groovy @@ -574,7 +574,10 @@ abstract class HttpServerTest extends WithHttpServer { def responses def request = request(SUCCESS, method, body).build() if (testParallelRequest()) { - def executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors()) + // Limit pool size. 
Too many threads overwhelm the server and starve the host + def availableProcessorsOverride = System.getenv().get("RUNTIME_AVAILABLE_PROCESSORS_OVERRIDE") + def poolSize = availableProcessorsOverride == null ? Runtime.getRuntime().availableProcessors() : Integer.valueOf(availableProcessorsOverride) + def executor = Executors.newFixedThreadPool(poolSize) def completionService = new ExecutorCompletionService(executor) (1..count).each { completionService.submit { @@ -1295,7 +1298,7 @@ abstract class HttpServerTest extends WithHttpServer { def traces = extraSpan ? 2 : 1 def extraTags = [(IG_RESPONSE_STATUS): String.valueOf(endpoint.status)] as Map if (hasPeerInformation()) { - extraTags.put(IG_PEER_ADDRESS, { it == "127.0.0.1" || it == "0.0.0.0" }) + extraTags.put(IG_PEER_ADDRESS, { it == "127.0.0.1" || it == "0.0.0.0" || it == "0:0:0:0:0:0:0:1" }) extraTags.put(IG_PEER_PORT, { Integer.parseInt(it as String) instanceof Integer }) } extraTags.put(IG_RESPONSE_HEADER_TAG, IG_RESPONSE_HEADER_VALUE) @@ -2218,8 +2221,13 @@ abstract class HttpServerTest extends WithHttpServer { if (hasPeerPort) { "$Tags.PEER_PORT" Integer } - "$Tags.PEER_HOST_IPV4" { it == "127.0.0.1" || (endpoint == FORWARDED && it == endpoint.body) } - "$Tags.HTTP_CLIENT_IP" { it == "127.0.0.1" || (endpoint == FORWARDED && it == endpoint.body) } + if(span.getTag(Tags.PEER_HOST_IPV6) != null) { + "$Tags.PEER_HOST_IPV6" { it == "0:0:0:0:0:0:0:1" || (endpoint == FORWARDED && it == endpoint.body) } + "$Tags.HTTP_CLIENT_IP" { it == "0:0:0:0:0:0:0:1" || (endpoint == FORWARDED && it == endpoint.body) } + } else { + "$Tags.PEER_HOST_IPV4" { it == "127.0.0.1" || (endpoint == FORWARDED && it == endpoint.body) } + "$Tags.HTTP_CLIENT_IP" { it == "127.0.0.1" || (endpoint == FORWARDED && it == endpoint.body) } + } } else { "$Tags.HTTP_CLIENT_IP" clientIp } diff --git a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy 
b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy index dcb0d6a2292..ea140c777f1 100644 --- a/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy +++ b/dd-java-agent/testing/src/main/groovy/datadog/trace/agent/test/server/http/TestHttpServer.groovy @@ -19,6 +19,7 @@ import org.eclipse.jetty.server.SslConnectionFactory import org.eclipse.jetty.server.handler.AbstractHandler import org.eclipse.jetty.server.handler.HandlerList import org.eclipse.jetty.util.ssl.SslContextFactory +import org.eclipse.jetty.util.thread.QueuedThreadPool import javax.net.ssl.HostnameVerifier import javax.net.ssl.SSLContext @@ -82,7 +83,10 @@ class TestHttpServer implements AutoCloseable { } private TestHttpServer() { - internalServer = new Server() + // In some versions, Jetty requires max threads > than some arbitrary calculated value + // The calculated value can be high in CI + // There is no easy way to override the configuration in a version-neutral way + internalServer = new Server(new QueuedThreadPool(400)) TrustManager[] trustManagers = new TrustManager[1] trustManagers[0] = trustManager @@ -124,7 +128,6 @@ class TestHttpServer implements AutoCloseable { internalServer.addConnector(https) customizer.call(internalServer) - internalServer.start() // set after starting, otherwise two callbacks get added. 
internalServer.stopAtShutdown = true diff --git a/dd-smoke-tests/crashtracking/src/main/java/datadog/smoketest/crashtracking/CrashtrackingTestApplication.java b/dd-smoke-tests/crashtracking/src/main/java/datadog/smoketest/crashtracking/CrashtrackingTestApplication.java index 65b3305345e..5e8be98ea6d 100644 --- a/dd-smoke-tests/crashtracking/src/main/java/datadog/smoketest/crashtracking/CrashtrackingTestApplication.java +++ b/dd-smoke-tests/crashtracking/src/main/java/datadog/smoketest/crashtracking/CrashtrackingTestApplication.java @@ -39,7 +39,7 @@ public static void main(String[] args) throws Exception { while (size < 1024) { buffer.add(new byte[size * 1024 * 1024]); System.out.println("Allocated " + size + "MB"); - if (size < 256) { + if (size < 512) { size *= 2; } } diff --git a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java index 54d9290941c..e3d8b38659a 100644 --- a/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java +++ b/dd-smoke-tests/crashtracking/src/test/java/datadog/smoketest/CrashtrackingSmokeTest.java @@ -217,6 +217,8 @@ void testOomeTracking() throws Exception { Arrays.asList( javaPath(), "-javaagent:" + agentShadowJar(), + "-Xmx96m", + "-Xms96m", "-XX:OnOutOfMemoryError=" + onErrorValue, "-XX:ErrorFile=" + errorFile, "-XX:+CrashOnOutOfMemoryError", // Use OOME to trigger crash @@ -248,6 +250,8 @@ void testCombineTracking() throws Exception { Arrays.asList( javaPath(), "-javaagent:" + agentShadowJar(), + "-Xmx96m", + "-Xms96m", "-XX:OnOutOfMemoryError=" + onOomeValue, "-XX:OnError=" + onErrorValue, "-XX:ErrorFile=" + errorFile, diff --git a/dd-smoke-tests/debugger-integration-tests/src/test/java/datadog/smoketest/ProcessBuilderHelper.java b/dd-smoke-tests/debugger-integration-tests/src/test/java/datadog/smoketest/ProcessBuilderHelper.java index 1ae8303df4b..1b107f7c31d 100644 --- 
a/dd-smoke-tests/debugger-integration-tests/src/test/java/datadog/smoketest/ProcessBuilderHelper.java +++ b/dd-smoke-tests/debugger-integration-tests/src/test/java/datadog/smoketest/ProcessBuilderHelper.java @@ -32,7 +32,7 @@ public static ProcessBuilder createProcessBuilder( Arrays.asList( javaPath(), // "-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=localhost:5006", - "-Xmx" + System.getProperty("datadog.forkedMaxHeapSize", "512M"), + "-Xmx" + System.getProperty("datadog.forkedMaxHeapSize", "1024M"), "-Xms" + System.getProperty("datadog.forkedMinHeapSize", "64M"), "-javaagent:" + agentShadowJar(), "-XX:ErrorFile=/tmp/hs_err_pid%p.log", diff --git a/dd-smoke-tests/gradle/src/test/groovy/datadog/smoketest/GradleLauncherSmokeTest.groovy b/dd-smoke-tests/gradle/src/test/groovy/datadog/smoketest/GradleLauncherSmokeTest.groovy index 094c46f5206..0fe155ab45e 100644 --- a/dd-smoke-tests/gradle/src/test/groovy/datadog/smoketest/GradleLauncherSmokeTest.groovy +++ b/dd-smoke-tests/gradle/src/test/groovy/datadog/smoketest/GradleLauncherSmokeTest.groovy @@ -9,7 +9,7 @@ import datadog.trace.civisibility.utils.ShellCommandExecutor */ class GradleLauncherSmokeTest extends AbstractGradleTest { - private static final int GRADLE_BUILD_TIMEOUT_MILLIS = 60_000 + private static final int GRADLE_BUILD_TIMEOUT_MILLIS = 90_000 private static final String AGENT_JAR = System.getProperty("datadog.smoketest.agent.shadowJar.path") diff --git a/dd-smoke-tests/log-injection/src/test/resources/SpockConfig.groovy b/dd-smoke-tests/log-injection/src/test/resources/SpockConfig.groovy index cfa0ebc2632..9e4c7f1f6e9 100644 --- a/dd-smoke-tests/log-injection/src/test/resources/SpockConfig.groovy +++ b/dd-smoke-tests/log-injection/src/test/resources/SpockConfig.groovy @@ -1,5 +1,11 @@ runner { parallel { enabled true + + // Runtime.getRuntime().availableProcessors() is used to scale the parallelism by default + // but it returns weird values in Gitlab/kubernetes so fix the parallelism to 
a specific value + if (System.getenv("RUNTIME_AVAILABLE_PROCESSORS_OVERRIDE") != null) { + fixed(Integer.valueOf(System.getenv("RUNTIME_AVAILABLE_PROCESSORS_OVERRIDE"))) + } } } diff --git a/dd-smoke-tests/profiling-integration-tests/src/test/java/datadog/smoketest/JFRBasedProfilingIntegrationTest.java b/dd-smoke-tests/profiling-integration-tests/src/test/java/datadog/smoketest/JFRBasedProfilingIntegrationTest.java index 02d49e29961..6adf84ddacc 100644 --- a/dd-smoke-tests/profiling-integration-tests/src/test/java/datadog/smoketest/JFRBasedProfilingIntegrationTest.java +++ b/dd-smoke-tests/profiling-integration-tests/src/test/java/datadog/smoketest/JFRBasedProfilingIntegrationTest.java @@ -748,7 +748,7 @@ private static ProcessBuilder createProcessBuilder( final List command = Arrays.asList( javaPath(), - "-Xmx" + System.getProperty("datadog.forkedMaxHeapSize", "512M"), + "-Xmx" + System.getProperty("datadog.forkedMaxHeapSize", "1024M"), "-Xms" + System.getProperty("datadog.forkedMinHeapSize", "64M"), "-javaagent:" + agentShadowJar(), "-XX:ErrorFile=/tmp/hs_err_pid%p.log", diff --git a/dd-smoke-tests/profiling-integration-tests/src/test/java/datadog/smoketest/SmokeTestUtils.java b/dd-smoke-tests/profiling-integration-tests/src/test/java/datadog/smoketest/SmokeTestUtils.java index 12bceeeadd0..d1a13d80fb5 100644 --- a/dd-smoke-tests/profiling-integration-tests/src/test/java/datadog/smoketest/SmokeTestUtils.java +++ b/dd-smoke-tests/profiling-integration-tests/src/test/java/datadog/smoketest/SmokeTestUtils.java @@ -28,7 +28,7 @@ static ProcessBuilder createProcessBuilder( new ArrayList<>( Arrays.asList( javaPath(), - "-Xmx" + System.getProperty("datadog.forkedMaxHeapSize", "512M"), + "-Xmx" + System.getProperty("datadog.forkedMaxHeapSize", "1024M"), "-Xms" + System.getProperty("datadog.forkedMinHeapSize", "64M"), "-javaagent:" + agentShadowJar(), "-XX:ErrorFile=/tmp/hs_err_pid%p.log", diff --git a/dd-smoke-tests/spring-boot-3.0-native/application/build.gradle 
b/dd-smoke-tests/spring-boot-3.0-native/application/build.gradle index 868dcf3239e..441b0ee29f9 100644 --- a/dd-smoke-tests/spring-boot-3.0-native/application/build.gradle +++ b/dd-smoke-tests/spring-boot-3.0-native/application/build.gradle @@ -39,6 +39,7 @@ if (hasProperty('agentPath')) { if (withProfiler && property('profiler') == 'true') { buildArgs.add("-J-Ddd.profiling.enabled=true") } + jvmArgs.add("-Xmx3072M") } } } diff --git a/dd-smoke-tests/spring-boot-3.0-native/build.gradle b/dd-smoke-tests/spring-boot-3.0-native/build.gradle index ed37cbb264c..167af79baa2 100644 --- a/dd-smoke-tests/spring-boot-3.0-native/build.gradle +++ b/dd-smoke-tests/spring-boot-3.0-native/build.gradle @@ -31,7 +31,7 @@ if (version >= 17) { tasks.register('springNativeBuild', Exec) { workingDir "$appDir" environment += [ - 'GRADLE_OPTS': "-Dorg.gradle.jvmargs='-Xmx512M'", + 'GRADLE_OPTS': "-Dorg.gradle.jvmargs='-Xmx1024M'", 'JAVA_HOME': javaHome, 'GRAALVM_HOME': testJvmHome, 'DD_TRACE_METHODS' : 'datadog.smoketest.springboot.controller.WebController[sayHello]', diff --git a/dd-smoke-tests/spring-boot-3.0-native/src/test/groovy/SpringBootNativeInstrumentationTest.groovy b/dd-smoke-tests/spring-boot-3.0-native/src/test/groovy/SpringBootNativeInstrumentationTest.groovy index c5b9edeea24..6e23241f3fd 100644 --- a/dd-smoke-tests/spring-boot-3.0-native/src/test/groovy/SpringBootNativeInstrumentationTest.groovy +++ b/dd-smoke-tests/spring-boot-3.0-native/src/test/groovy/SpringBootNativeInstrumentationTest.groovy @@ -7,6 +7,7 @@ import spock.lang.Shared import spock.lang.TempDir import org.openjdk.jmc.flightrecorder.JfrLoaderToolkit +import spock.util.concurrent.PollingConditions import java.nio.file.FileVisitResult import java.nio.file.Files @@ -14,7 +15,6 @@ import java.nio.file.Path import java.nio.file.SimpleFileVisitor import java.nio.file.attribute.BasicFileAttributes import java.util.concurrent.atomic.AtomicInteger -import java.util.concurrent.locks.LockSupport class 
SpringBootNativeInstrumentationTest extends AbstractServerSmokeTest { @Shared @@ -69,24 +69,22 @@ class SpringBootNativeInstrumentationTest extends AbstractServerSmokeTest { def "check native instrumentation"() { setup: String url = "http://localhost:${httpPort}/hello" + def conditions = new PollingConditions(initialDelay: 2, timeout: 6) when: def response = client.newCall(new Request.Builder().url(url).get().build()).execute() then: - def ts = System.nanoTime() def responseBodyStr = response.body().string() responseBodyStr != null responseBodyStr.contains("Hello world") waitForTraceCount(1) - // sanity test for profiler generating JFR files - // the recording is collected after 1 second of execution - // make sure the app has been up and running for at least 1.5 seconds - while (System.nanoTime() - ts < 1_500_000_000L) { - LockSupport.parkNanos(1_000_000) + conditions.eventually { + assert countJfrs() > 0 } - countJfrs() > 0 + + udpMessage.get(1, TimeUnit.SECONDS) contains "service:smoke-test-java-app,version:99,env:smoketest" } int countJfrs() { diff --git a/dd-smoke-tests/springboot-mongo/src/test/groovy/datadog/smoketest/SpringBootMongoIntegrationTest.groovy b/dd-smoke-tests/springboot-mongo/src/test/groovy/datadog/smoketest/SpringBootMongoIntegrationTest.groovy index 72b33709b1d..f9d9c4d8930 100644 --- a/dd-smoke-tests/springboot-mongo/src/test/groovy/datadog/smoketest/SpringBootMongoIntegrationTest.groovy +++ b/dd-smoke-tests/springboot-mongo/src/test/groovy/datadog/smoketest/SpringBootMongoIntegrationTest.groovy @@ -23,7 +23,7 @@ class SpringBootMongoIntegrationTest extends AbstractServerSmokeTest { @Override void beforeProcessBuilders() { - mongoDbContainer = new MongoDBContainer(DockerImageName.parse("mongo:4.0.10")) + mongoDbContainer = new MongoDBContainer(DockerImageName.parse("mongo:4.4.29")) mongoDbContainer.start() mongoDbUri = mongoDbContainer.replicaSetUrl } diff --git a/dd-smoke-tests/springboot-openliberty-20/application/pom.xml 
b/dd-smoke-tests/springboot-openliberty-20/application/pom.xml index d9e341675ae..85b6859c514 100644 --- a/dd-smoke-tests/springboot-openliberty-20/application/pom.xml +++ b/dd-smoke-tests/springboot-openliberty-20/application/pom.xml @@ -107,5 +107,25 @@ ${target.dir} + + maven-proxy-profile + + + env.MAVEN_REPOSITORY_PROXY + + + + + maven-proxy-repo + ${env.MAVEN_REPOSITORY_PROXY} + + + + + maven-plugin-proxy + ${env.MAVEN_REPOSITORY_PROXY} + + + diff --git a/dd-smoke-tests/springboot-openliberty-23/application/pom.xml b/dd-smoke-tests/springboot-openliberty-23/application/pom.xml index a6b66b8083a..5a8fb308d78 100644 --- a/dd-smoke-tests/springboot-openliberty-23/application/pom.xml +++ b/dd-smoke-tests/springboot-openliberty-23/application/pom.xml @@ -107,5 +107,25 @@ ${target.dir} + + maven-proxy-profile + + + env.MAVEN_REPOSITORY_PROXY + + + + + maven-proxy-repo + ${env.MAVEN_REPOSITORY_PROXY} + + + + + maven-plugin-proxy + ${env.MAVEN_REPOSITORY_PROXY} + + + diff --git a/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy b/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy index 8fa819362d9..498a4b4a0af 100644 --- a/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy +++ b/dd-trace-core/src/test/groovy/datadog/trace/common/metrics/ConflatingMetricAggregatorTest.groovy @@ -23,7 +23,7 @@ class ConflatingMetricAggregatorTest extends DDSpecification { static final int HTTP_OK = 200 @Shared - long reportingInterval = 10 + long reportingInterval = 1 @Shared int queueSize = 256 @@ -106,9 +106,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification { CountDownLatch latch = new CountDownLatch(1) aggregator.publish([new SimpleSpan("service", "operation", "resource", "type", false, true, false, 0, 100, HTTP_OK)]) aggregator.report() - latch.await(2, SECONDS) + def latchTriggered = latch.await(2, SECONDS) then: + 
latchTriggered 1 * writer.startBucket(1, _, _) 1 * writer.add(new MetricKey("resource", "service", "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value -> value.getHitCount() == 1 && value.getTopLevelCount() == 1 && value.getDuration() == 100 @@ -135,9 +136,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification { new SimpleSpan("service", "operation", "resource", "type", measured, topLevel, false, 0, 100, HTTP_OK) ]) aggregator.report() - latch.await(2, SECONDS) + def latchTriggered = latch.await(2, SECONDS) then: + latchTriggered 1 * writer.startBucket(1, _, _) 1 * writer.add(new MetricKey("resource", "service", "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value -> value.getHitCount() == 1 && value.getTopLevelCount() == topLevelCount && value.getDuration() == 100 @@ -177,9 +179,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification { aggregator.publish(trace) } aggregator.report() - latch.await(2, SECONDS) + def latchTriggered = latch.await(2, SECONDS) then: "metrics should be conflated" + latchTriggered 1 * writer.finishBucket() >> { latch.countDown() } 1 * writer.startBucket(2, _, SECONDS.toNanos(reportingInterval)) 1 * writer.add(new MetricKey("resource", "service", "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value -> @@ -216,9 +219,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification { ]) } aggregator.report() - latch.await(2, SECONDS) + def latchTriggered = latch.await(2, SECONDS) then: "the first aggregate should be dropped but the rest reported" + latchTriggered 1 * writer.startBucket(10, _, SECONDS.toNanos(reportingInterval)) for (int i = 1; i < 11; ++i) { 1 * writer.add(new MetricKey("resource", "service" + i, "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value -> @@ -252,9 +256,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification { ]) } aggregator.report() - latch.await(2, 
SECONDS) + def latchTriggered = latch.await(2, SECONDS) then: "all aggregates should be reported" + latchTriggered 1 * writer.startBucket(5, _, SECONDS.toNanos(reportingInterval)) for (int i = 0; i < 5; ++i) { 1 * writer.add(new MetricKey("resource", "service" + i, "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value -> @@ -271,9 +276,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification { ]) } aggregator.report() - latch.await(2, SECONDS) + latchTriggered = latch.await(2, SECONDS) then: "aggregate not updated in cycle is not reported" + latchTriggered 1 * writer.startBucket(4, _, SECONDS.toNanos(reportingInterval)) for (int i = 1; i < 5; ++i) { 1 * writer.add(new MetricKey("resource", "service" + i, "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value -> @@ -307,16 +313,17 @@ class ConflatingMetricAggregatorTest extends DDSpecification { ]) } aggregator.report() - latch.await(2, SECONDS) + def latchTriggered = latch.await(2, SECONDS) then: "all aggregates should be reported" + latchTriggered 1 * writer.startBucket(5, _, SECONDS.toNanos(reportingInterval)) for (int i = 0; i < 5; ++i) { 1 * writer.add(new MetricKey("resource", "service" + i, "operation", "type", HTTP_OK, false), _) >> { MetricKey key, AggregateMetric value -> value.getHitCount() == 1 && value.getDuration() == duration } } - 1 * writer.finishBucket() + 1 * writer.finishBucket() >> { latch.countDown() } when: reportAndWaitUntilEmpty(aggregator) @@ -349,9 +356,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification { new SimpleSpan("service" + i, "operation", "resource", "type", false, true, false, 0, duration, HTTP_OK) ]) } - latch.await(2, SECONDS) + def latchTriggered = latch.await(2, SECONDS) then: "all aggregates should be reported" + latchTriggered 1 * writer.startBucket(5, _, SECONDS.toNanos(1)) for (int i = 0; i < 5; ++i) { 1 * writer.add(new MetricKey("resource", "service" + i, "operation", "type", HTTP_OK, 
false), _) >> { MetricKey key, AggregateMetric value -> @@ -421,9 +429,10 @@ class ConflatingMetricAggregatorTest extends DDSpecification { new SimpleSpan("service" + i, "operation", "resource", "type", false, true, false, 0, duration, HTTP_OK) ]) } - latch.await(2, SECONDS) + def latchTriggered = latch.await(2, SECONDS) then: "writer should be reset if reporting fails" + latchTriggered 1 * writer.startBucket(_, _, _) >> { throw new IllegalArgumentException("something went wrong") } @@ -449,6 +458,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification { then: notThrown(TimeoutException) !flushed + + cleanup: + aggregator.close() } def "force flush should wait for aggregator to start"() { @@ -480,6 +492,9 @@ class ConflatingMetricAggregatorTest extends DDSpecification { then: notThrown(TimeoutException) flushed + + cleanup: + aggregator.close() } def reportAndWaitUntilEmpty(ConflatingMetricsAggregator aggregator) { diff --git a/gradle/configure_tests.gradle b/gradle/configure_tests.gradle index 60280fa3144..945a9e9665b 100644 --- a/gradle/configure_tests.gradle +++ b/gradle/configure_tests.gradle @@ -20,7 +20,7 @@ def isTestingInstrumentation(Project project) { } def forkedTestLimit = gradle.sharedServices.registerIfAbsent("forkedTestLimit", BuildService) { - maxParallelUsages = 2 + maxParallelUsages = 3 } // Force timeout after 9 minutes (CircleCI defaults will fail after 10 minutes without output) @@ -69,7 +69,6 @@ tasks.withType(Test).configureEach { if (name.startsWith("forkedTest") || name.endsWith("ForkedTest")) { setExcludes([]) setIncludes(["**/*ForkedTest*"]) - jvmArgs += ["-Xms256M", "-Xmx256M"] forkEvery 1 // Limit the number of concurrent forked tests usesService(forkedTestLimit) diff --git a/utils/test-utils/src/main/java/datadog/trace/test/util/ForkedTestUtils.java b/utils/test-utils/src/main/java/datadog/trace/test/util/ForkedTestUtils.java index 5363edc9dd7..03e8a279ad5 100644 --- 
a/utils/test-utils/src/main/java/datadog/trace/test/util/ForkedTestUtils.java +++ b/utils/test-utils/src/main/java/datadog/trace/test/util/ForkedTestUtils.java @@ -2,7 +2,7 @@ public class ForkedTestUtils { public static String getMaxMemoryArgumentForFork() { - return "-Xmx" + System.getProperty("datadog.forkedMaxHeapSize", "512M"); + return "-Xmx" + System.getProperty("datadog.forkedMaxHeapSize", "1024M"); } public static String getMinMemoryArgumentForFork() {