diff --git a/.github/actions/cpUtility-testing/action.yml b/.github/actions/cpUtility-testing/action.yml index 883763ccdc..a59ad5ac05 100644 --- a/.github/actions/cpUtility-testing/action.yml +++ b/.github/actions/cpUtility-testing/action.yml @@ -25,28 +25,28 @@ runs: using: "composite" steps: - name: Set up QEMU - uses: docker/setup-qemu-action@v3 + uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 #3.6.0 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 #v3.11.1 with: driver-opts: image=moby/buildkit:v0.15.1 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ inputs.snapshot-ecr-role }} aws-region: ${{ inputs.aws-region }} - name: Login to private staging ecr - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: ${{ inputs.image_registry }} env: AWS_REGION: ${{ inputs.aws-region }} - name: Build image for testing - uses: docker/build-push-action@v5 + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 #v6.18.0 with: push: false build-args: "ADOT_JAVA_VERSION=${{ inputs.adot-java-version }}" @@ -60,7 +60,7 @@ runs: run: .github/scripts/test-adot-javaagent-image.sh "${{ inputs.image_uri_with_tag }}" "${{ inputs.adot-java-version }}" - name: Build and push image - uses: docker/build-push-action@v5 + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 #v6.18.0 with: push: true build-args: "ADOT_JAVA_VERSION=${{ inputs.adot-java-version }}" diff --git a/.github/actions/image_scan/action.yml b/.github/actions/image_scan/action.yml index eb19f78609..57a30dae58 100644 --- a/.github/actions/image_scan/action.yml +++ b/.github/actions/image_scan/action.yml @@ -11,6 +11,11 @@ inputs: severity: required: true description: "List of severities that will cause a failure" + logout: + required: true + description: | + Whether to log out of public AWS ECR. Set to 'true' for PR workflows to avoid potential call failures, + 'false' for daily scans, which have a higher bar for passing regularly and specifically want to sign in. runs: using: "composite" @@ -22,11 +27,12 @@ runs: # ensure we can make unauthenticated call. This is important for making the pr_build workflow run on # PRs created from forked repos.
- name: Logout of public AWS ECR + if: inputs.logout == 'true' shell: bash run: docker logout public.ecr.aws - name: Run Trivy vulnerability scanner on image - uses: aquasecurity/trivy-action@master + uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 #v0.33.1 with: image-ref: ${{ inputs.image-ref }} severity: ${{ inputs.severity }} diff --git a/.github/actions/patch-dependencies/action.yml b/.github/actions/patch-dependencies/action.yml index 55399a6a02..c8599d3b47 100644 --- a/.github/actions/patch-dependencies/action.yml +++ b/.github/actions/patch-dependencies/action.yml @@ -60,21 +60,22 @@ runs: - name: Clone and patch repositories run: .github/scripts/patch.sh if: ${{ env.patch_otel_java == 'true' || - env.patch_otel_java_contrib == 'true' }} + env.patch_otel_java_contrib == 'true' || + env.patch_otel_java_instrumentation == 'true' }} shell: bash - name: Build opentelemetry-java with tests - uses: gradle/gradle-build-action@v2 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa #v2 if: ${{ env.patch_otel_java == 'true' && inputs.run_tests != 'false' }} with: - arguments: build publishToMavenLocal + arguments: build publishToMavenLocal --scan --no-daemon build-root-directory: opentelemetry-java - name: Build opentelemetry-java - uses: gradle/gradle-build-action@v2 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa #v2 if: ${{ env.patch_otel_java == 'true' && inputs.run_tests == 'false' }} with: - arguments: publishToMavenLocal + arguments: publishToMavenLocal --scan --no-daemon build-root-directory: opentelemetry-java - name: cleanup opentelemetry-java @@ -83,17 +84,17 @@ runs: shell: bash - name: Build opentelemetry-java-contrib with tests - uses: gradle/gradle-build-action@v2 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa #v2 if: ${{ env.patch_otel_java_contrib == 'true' && inputs.run_tests != 'false' }} with: - arguments: build publishToMavenLocal + arguments: build publishToMavenLocal --scan --no-daemon build-root-directory: opentelemetry-java-contrib - name: Build opentelemetry-java-contrib - uses: gradle/gradle-build-action@v2 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa #v2 if: ${{ env.patch_otel_java_contrib == 'true' && inputs.run_tests == 'false' }} with: - arguments: publishToMavenLocal + arguments: publishToMavenLocal --scan --no-daemon build-root-directory: opentelemetry-java-contrib - name: cleanup opentelemetry-java-contrib @@ -102,20 +103,20 @@ runs: shell: bash - name: Build opentelemetry-java-instrumentation with tests - uses: gradle/gradle-build-action@v2 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa #v2 if: ${{ env.patch_otel_java_instrumentation == 'true' && inputs.run_tests != 'false' }} with: - arguments: check -x spotlessCheck publishToMavenLocal + arguments: check -x spotlessCheck publishToMavenLocal --scan --no-daemon build-root-directory: opentelemetry-java-instrumentation - name: Build opentelemetry java instrumentation - uses: gradle/gradle-build-action@v2 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa #v2 if: ${{ env.patch_otel_java_instrumentation == 'true' && inputs.run_tests == 'false' }} with: - arguments: publishToMavenLocal + arguments: publishToMavenLocal --scan --no-daemon build-root-directory: opentelemetry-java-instrumentation - name: cleanup opentelmetry-java-instrumentation run: rm -rf opentelemetry-java-instrumentation if: ${{ 
env.patch_otel_java_instrumentation == 'true' }} - shell: bash \ No newline at end of file + shell: bash diff --git a/.github/patches/opentelemetry-java-contrib.patch b/.github/patches/opentelemetry-java-contrib.patch index 68a35bc64e..6db6a272db 100644 --- a/.github/patches/opentelemetry-java-contrib.patch +++ b/.github/patches/opentelemetry-java-contrib.patch @@ -1,10 +1,50 @@ +diff --git a/.github/renovate.json5 b/.github/renovate.json5 +index 4f7743a3..9e2082ed 100644 +--- a/.github/renovate.json5 ++++ b/.github/renovate.json5 +@@ -176,5 +176,27 @@ + 'npx (?[^@]+)@(?[^\\s]+)', + ], + }, ++ { ++ customType: 'regex', ++ datasourceTemplate: 'java-version', ++ managerFilePatterns: [ ++ '.github/workflows/**', ++ ], ++ matchStrings: [ ++ '(?\\d+) # renovate: datasource=java-version', ++ ], ++ depNameTemplate: 'java', ++ extractVersionTemplate: '^(?\\d+)', ++ }, ++ { ++ customType: 'regex', ++ datasourceTemplate: 'github-releases', ++ managerFilePatterns: [ ++ '**/build.gradle.kts', ++ ], ++ matchStrings: [ ++ '"https://github.com/(?[^/]+/[^/]+)/zipball/(?.+?)"', ++ ], ++ }, + ], + } diff --git a/aws-xray-propagator/src/main/java/io/opentelemetry/contrib/awsxray/propagator/AwsXrayPropagator.java b/aws-xray-propagator/src/main/java/io/opentelemetry/contrib/awsxray/propagator/AwsXrayPropagator.java -index 2d8de301..54e26e12 100644 +index 721d0144..dce2d2a3 100644 --- a/aws-xray-propagator/src/main/java/io/opentelemetry/contrib/awsxray/propagator/AwsXrayPropagator.java +++ b/aws-xray-propagator/src/main/java/io/opentelemetry/contrib/awsxray/propagator/AwsXrayPropagator.java -@@ -68,6 +68,9 @@ public final class AwsXrayPropagator implements TextMapPropagator { - private static final char IS_SAMPLED = '1'; - private static final char NOT_SAMPLED = '0'; +@@ -9,6 +9,7 @@ import static io.opentelemetry.api.internal.OtelEncodingUtils.isValidBase16Strin + + import io.opentelemetry.api.baggage.Baggage; + import io.opentelemetry.api.baggage.BaggageBuilder; ++import io.opentelemetry.api.baggage.propagation.W3CBaggagePropagator; + import io.opentelemetry.api.internal.StringUtils; + import io.opentelemetry.api.trace.Span; + import io.opentelemetry.api.trace.SpanContext; +@@ -80,6 +81,9 @@ public final class AwsXrayPropagator implements TextMapPropagator { + private static final String INVALID_LINEAGE = "-1:11111111:0"; + private static final int NUM_OF_LINEAGE_DELIMITERS = 2; + // Copied from AwsSamplingResult in aws-xray extension + private static final String AWS_XRAY_SAMPLING_RULE_TRACE_STATE_KEY = "xrsr"; @@ -12,10 +52,10 @@ index 2d8de301..54e26e12 100644 private static final List FIELDS = Collections.singletonList(TRACE_HEADER_KEY); private static final AwsXrayPropagator INSTANCE = new AwsXrayPropagator(); -@@ -127,6 +130,16 @@ public final class AwsXrayPropagator implements TextMapPropagator { - .append(samplingFlag); +@@ -140,6 +144,16 @@ public final class AwsXrayPropagator implements TextMapPropagator { Baggage baggage = Baggage.fromContext(context); + String lineageHeader = baggage.getEntryValue(LINEAGE_KEY); + // Get sampling rule from trace state and inject into baggage + // This is a back up in case the next service does not have trace state propagation + String ruleFromTraceState = @@ -26,14 +66,23 @@ index 2d8de301..54e26e12 100644 + .put(AWS_XRAY_SAMPLING_RULE_TRACE_STATE_KEY, ruleFromTraceState) + .build(); + } - // Truncate baggage to 256 chars per X-Ray spec. 
- baggage.forEach( - new BiConsumer() { -@@ -236,13 +249,15 @@ public final class AwsXrayPropagator implements TextMapPropagator { - if (spanId == null || traceId == null) { + + if (lineageHeader != null) { + traceHeader +@@ -152,6 +166,9 @@ public final class AwsXrayPropagator implements TextMapPropagator { + // add 256 character truncation + String truncatedTraceHeader = traceHeader.substring(0, Math.min(traceHeader.length(), 256)); + setter.set(carrier, TRACE_HEADER_KEY, truncatedTraceHeader); ++ ++ // Ensure baggage is propagated with any modifications ++ W3CBaggagePropagator.getInstance().inject(context.with(baggage), carrier, setter); + } + + @Override +@@ -245,12 +262,15 @@ public final class AwsXrayPropagator implements TextMapPropagator { logger.finest("Both traceId and spanId are required to extract a valid span context. "); } -- + + SpanContext upstreamSpanContext = Span.fromContext(context).getSpanContext(); SpanContext spanContext = SpanContext.createFromRemoteParent( @@ -44,11 +93,11 @@ index 2d8de301..54e26e12 100644 + upstreamSpanContext.isValid() + ? upstreamSpanContext.getTraceState() + : TraceState.getDefault()); + if (spanContext.isValid()) { context = context.with(Span.wrap(spanContext)); - } diff --git a/aws-xray/build.gradle.kts b/aws-xray/build.gradle.kts -index ccec9d52..fddbad18 100644 +index 54dabba7..d56b12bd 100644 --- a/aws-xray/build.gradle.kts +++ b/aws-xray/build.gradle.kts @@ -11,6 +11,7 @@ dependencies { @@ -58,8 +107,8 @@ index ccec9d52..fddbad18 100644 + implementation("io.opentelemetry.semconv:opentelemetry-semconv:1.32.0-alpha") implementation("com.squareup.okhttp3:okhttp") - implementation("io.opentelemetry:opentelemetry-semconv") -@@ -24,6 +25,7 @@ dependencies { + implementation("io.opentelemetry.semconv:opentelemetry-semconv") +@@ -25,6 +26,7 @@ dependencies { implementation("com.fasterxml.jackson.core:jackson-core") implementation("com.fasterxml.jackson.core:jackson-databind") @@ -284,7 +333,7 @@ index 00000000..dc5b7a01 + } +} diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSampler.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSampler.java -index 9b5a2e7e..31d5a293 100644 +index ad9b72a2..31d5a293 100644 --- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSampler.java +++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSampler.java @@ -9,16 +9,22 @@ import io.opentelemetry.api.common.Attributes; @@ -320,8 +369,11 @@ index 9b5a2e7e..31d5a293 100644 private final Resource resource; private final Clock clock; private final Sampler initialSampler; -@@ -58,6 +67,9 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { +@@ -56,9 +65,11 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { + @Nullable private volatile ScheduledFuture pollFuture; + @Nullable private volatile ScheduledFuture fetchTargetsFuture; @Nullable private volatile GetSamplingRulesResponse previousRulesResponse; +- @Nullable private volatile XrayRulesSampler internalXrayRulesSampler; private volatile Sampler sampler; + @Nullable private AwsXrayAdaptiveSamplingConfig adaptiveSamplingConfig; @@ -330,7 +382,7 @@ index 9b5a2e7e..31d5a293 100644 /** * Returns a {@link AwsXrayRemoteSamplerBuilder} with the given {@link Resource}. This {@link * Resource} should be the same as what the OpenTelemetry SDK is configured with. 
-@@ -119,6 +131,40 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { +@@ -120,13 +131,47 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { return "AwsXrayRemoteSampler{" + sampler.getDescription() + "}"; } @@ -371,18 +423,36 @@ index 9b5a2e7e..31d5a293 100644 private void getAndUpdateSampler() { try { // No pagination support yet, or possibly ever. -@@ -133,7 +179,8 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { + GetSamplingRulesResponse response = + client.getSamplingRules(GetSamplingRulesRequest.create(null)); + if (!response.equals(previousRulesResponse)) { +- updateInternalSamplers( ++ sampler = + new XrayRulesSampler( + clientId, + resource, +@@ -134,8 +179,8 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { initialSampler, response.getSamplingRules().stream() .map(SamplingRuleRecord::getRule) -- .collect(Collectors.toList())); +- .collect(Collectors.toList()))); +- + .collect(Collectors.toList()), + adaptiveSamplingConfig); previousRulesResponse = response; ScheduledFuture existingFetchTargetsFuture = fetchTargetsFuture; if (existingFetchTargetsFuture != null) { -@@ -177,14 +224,29 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { - XrayRulesSampler xrayRulesSampler = (XrayRulesSampler) sampler; +@@ -172,25 +217,41 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { + } + + private void fetchTargets() { +- if (this.internalXrayRulesSampler == null) { ++ if (!(sampler instanceof XrayRulesSampler)) { + throw new IllegalStateException("Programming bug."); + } + +- XrayRulesSampler xrayRulesSampler = this.internalXrayRulesSampler; ++ XrayRulesSampler xrayRulesSampler = (XrayRulesSampler) sampler; try { Date now = Date.from(Instant.ofEpochSecond(0, clock.now())); - List statistics = xrayRulesSampler.snapshot(now); @@ -414,6 +484,24 @@ index 9b5a2e7e..31d5a293 100644 Map targets = response.getDocuments().stream() .collect(Collectors.toMap(SamplingTargetDocument::getRuleName, Function.identity())); +- updateInternalSamplers(xrayRulesSampler.withTargets(targets, requestedTargetRuleNames, now)); ++ sampler = ++ xrayRulesSampler = xrayRulesSampler.withTargets(targets, requestedTargetRuleNames, now); + } catch (Throwable t) { + // Might be a transient API failure, try again after a default interval. 
+ fetchTargetsFuture = +@@ -226,11 +287,6 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { + return new String(clientIdChars); + } + +- private void updateInternalSamplers(XrayRulesSampler xrayRulesSampler) { +- this.internalXrayRulesSampler = xrayRulesSampler; +- this.sampler = Sampler.parentBased(internalXrayRulesSampler); +- } +- + // Visible for testing + XraySamplerClient getClient() { + return client; diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/GetSamplingRulesResponse.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/GetSamplingRulesResponse.java index dca930d5..01835dc2 100644 --- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/GetSamplingRulesResponse.java @@ -608,11 +696,14 @@ index c1e178f5..406f07e2 100644 + } } diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplier.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplier.java -index 1ef8abf5..328e63dd 100644 +index 1d97c4ae..dd369f5f 100644 --- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplier.java +++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplier.java -@@ -9,10 +9,13 @@ import io.opentelemetry.api.common.AttributeKey; +@@ -9,12 +9,16 @@ import static io.opentelemetry.semconv.ServiceAttributes.SERVICE_NAME; + + import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; ++import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.SpanKind; import io.opentelemetry.context.Context; +import io.opentelemetry.contrib.awsxray.GetSamplingTargetsRequest.SamplingBoostStatisticsDocument; @@ -625,19 +716,7 @@ index 1ef8abf5..328e63dd 100644 import io.opentelemetry.sdk.trace.data.LinkData; import io.opentelemetry.sdk.trace.samplers.Sampler; import io.opentelemetry.sdk.trace.samplers.SamplingDecision; -@@ -35,6 +38,11 @@ final class SamplingRuleApplier { - - private static final Map XRAY_CLOUD_PLATFORM; - -+ private static final AttributeKey URL_PATH = AttributeKey.stringKey("url.path"); -+ private static final AttributeKey URL_FULL = AttributeKey.stringKey("url.full"); -+ private static final AttributeKey HTTP_REQUEST_METHOD = -+ AttributeKey.stringKey("http.request.method"); -+ - static { - Map xrayCloudPlatform = new HashMap<>(); - xrayCloudPlatform.put(ResourceAttributes.CloudPlatformValues.AWS_EC2, "AWS::EC2::Instance"); -@@ -50,12 +58,20 @@ final class SamplingRuleApplier { +@@ -76,12 +80,20 @@ final class SamplingRuleApplier { private final String clientId; private final String ruleName; @@ -658,7 +737,7 @@ index 1ef8abf5..328e63dd 100644 private final Map attributeMatchers; private final Matcher urlPathMatcher; private final Matcher serviceNameMatcher; -@@ -68,7 +84,11 @@ final class SamplingRuleApplier { +@@ -94,7 +106,11 @@ final class SamplingRuleApplier { private final long nextSnapshotTimeNanos; @@ -671,7 +750,7 @@ index 1ef8abf5..328e63dd 100644 this.clientId = clientId; this.clock = clock; String ruleName = rule.getRuleName(); -@@ -82,6 +102,8 @@ final class SamplingRuleApplier { +@@ -108,6 +124,8 @@ final class SamplingRuleApplier { } this.ruleName = ruleName; @@ -680,7 +759,7 @@ index 1ef8abf5..328e63dd 100644 // We don't have a SamplingTarget so are ready to report a snapshot right away. 
nextSnapshotTimeNanos = clock.nanoTime(); -@@ -98,7 +120,15 @@ final class SamplingRuleApplier { +@@ -124,7 +142,15 @@ final class SamplingRuleApplier { reservoirSampler = Sampler.alwaysOff(); borrowing = false; } @@ -697,7 +776,7 @@ index 1ef8abf5..328e63dd 100644 if (rule.getAttributes().isEmpty()) { attributeMatchers = Collections.emptyMap(); -@@ -121,11 +151,16 @@ final class SamplingRuleApplier { +@@ -147,11 +173,16 @@ final class SamplingRuleApplier { private SamplingRuleApplier( String clientId, String ruleName, @@ -714,7 +793,7 @@ index 1ef8abf5..328e63dd 100644 Map attributeMatchers, Matcher urlPathMatcher, Matcher serviceNameMatcher, -@@ -137,11 +172,16 @@ final class SamplingRuleApplier { +@@ -163,11 +194,16 @@ final class SamplingRuleApplier { long nextSnapshotTimeNanos) { this.clientId = clientId; this.ruleName = ruleName; @@ -731,7 +810,7 @@ index 1ef8abf5..328e63dd 100644 this.attributeMatchers = attributeMatchers; this.urlPathMatcher = urlPathMatcher; this.serviceNameMatcher = serviceNameMatcher; -@@ -151,6 +191,7 @@ final class SamplingRuleApplier { +@@ -177,6 +213,7 @@ final class SamplingRuleApplier { this.resourceArnMatcher = resourceArnMatcher; this.statistics = statistics; this.nextSnapshotTimeNanos = nextSnapshotTimeNanos; @@ -739,31 +818,40 @@ index 1ef8abf5..328e63dd 100644 } @SuppressWarnings("deprecation") // TODO -@@ -162,11 +203,14 @@ final class SamplingRuleApplier { - String host = null; - - for (Map.Entry, Object> entry : attributes.asMap().entrySet()) { -- if (entry.getKey().equals(SemanticAttributes.HTTP_TARGET)) { -+ if (entry.getKey().equals(SemanticAttributes.HTTP_TARGET) -+ || entry.getKey().equals(URL_PATH)) { - httpTarget = (String) entry.getValue(); -- } else if (entry.getKey().equals(SemanticAttributes.HTTP_URL)) { -+ } else if (entry.getKey().equals(SemanticAttributes.HTTP_URL) -+ || entry.getKey().equals(URL_FULL)) { - httpUrl = (String) entry.getValue(); -- } else if (entry.getKey().equals(SemanticAttributes.HTTP_METHOD)) { -+ } else if (entry.getKey().equals(SemanticAttributes.HTTP_METHOD) -+ || entry.getKey().equals(HTTP_REQUEST_METHOD)) { - httpMethod = (String) entry.getValue(); - } else if (entry.getKey().equals(SemanticAttributes.NET_HOST_NAME)) { - host = (String) entry.getValue(); -@@ -237,45 +281,84 @@ final class SamplingRuleApplier { - statistics.sampled.increment(); +@@ -257,8 +294,13 @@ final class SamplingRuleApplier { + SpanKind spanKind, + Attributes attributes, + List parentLinks) { ++ // Only emit statistics for spans for which a sampling decision is being made actively ++ // i.e. The root span in a call chain ++ boolean shouldCount = !Span.fromContext(parentContext).getSpanContext().isValid(); + // Incrementing requests first ensures sample / borrow rate are positive. +- statistics.requests.increment(); ++ if (shouldCount) { ++ statistics.requests.increment(); ++ } + boolean reservoirExpired = clock.nanoTime() >= reservoirEndTimeNanos; + SamplingResult result = + !reservoirExpired +@@ -267,51 +309,92 @@ final class SamplingRuleApplier { + : SamplingResult.create(SamplingDecision.DROP); + if (result.getDecision() != SamplingDecision.DROP) { + // We use the result from the reservoir sampler if it worked. 
+- if (borrowing) { +- statistics.borrowed.increment(); ++ if (shouldCount) { ++ if (borrowing) { ++ statistics.borrowed.increment(); ++ } ++ statistics.sampled.increment(); + } +- statistics.sampled.increment(); return result; } - result = - fixedRateSampler.shouldSample( - parentContext, traceId, name, spanKind, attributes, parentLinks); +- if (result.getDecision() != SamplingDecision.DROP) { + + if (clock.nanoTime() < boostEndTimeNanos) { + result = @@ -774,7 +862,7 @@ index 1ef8abf5..328e63dd 100644 + fixedRateSampler.shouldSample( + parentContext, traceId, name, spanKind, attributes, parentLinks); + } - if (result.getDecision() != SamplingDecision.DROP) { ++ if (shouldCount && result.getDecision() != SamplingDecision.DROP) { statistics.sampled.increment(); } return result; @@ -859,7 +947,7 @@ index 1ef8abf5..328e63dd 100644 + Duration.between(now.toInstant(), target.getReservoirQuotaTtl().toInstant()) .toNanos(); } -@@ -283,16 +366,36 @@ final class SamplingRuleApplier { +@@ -319,16 +402,36 @@ final class SamplingRuleApplier { target.getIntervalSecs() != null ? TimeUnit.SECONDS.toNanos(target.getIntervalSecs()) : AwsXrayRemoteSampler.DEFAULT_TARGET_INTERVAL_NANOS; @@ -897,7 +985,7 @@ index 1ef8abf5..328e63dd 100644 attributeMatchers, urlPathMatcher, serviceNameMatcher, -@@ -308,11 +411,16 @@ final class SamplingRuleApplier { +@@ -344,11 +447,16 @@ final class SamplingRuleApplier { return new SamplingRuleApplier( clientId, ruleName, @@ -914,7 +1002,7 @@ index 1ef8abf5..328e63dd 100644 attributeMatchers, urlPathMatcher, serviceNameMatcher, -@@ -328,6 +436,15 @@ final class SamplingRuleApplier { +@@ -364,6 +472,15 @@ final class SamplingRuleApplier { return ruleName; } @@ -929,8 +1017,22 @@ index 1ef8abf5..328e63dd 100644 + @Nullable private static String getArn(Attributes attributes, Resource resource) { - String arn = resource.getAttributes().get(ResourceAttributes.AWS_ECS_CONTAINER_ARN); -@@ -479,5 +596,30 @@ final class SamplingRuleApplier { + String arn = resource.getAttributes().get(AWS_ECS_CONTAINER_ARN); +@@ -500,11 +617,11 @@ final class SamplingRuleApplier { + } + + private Sampler createRateLimited(int numPerSecond) { +- return new RateLimitingSampler(numPerSecond, clock); ++ return Sampler.parentBased(new RateLimitingSampler(numPerSecond, clock)); + } + + private static Sampler createFixedRate(double rate) { +- return Sampler.traceIdRatioBased(rate); ++ return Sampler.parentBased(Sampler.traceIdRatioBased(rate)); + } + + // We keep track of sampling requests and decisions to report to X-Ray to allow it to allocate +@@ -515,5 +632,30 @@ final class SamplingRuleApplier { final LongAdder requests = new LongAdder(); final LongAdder sampled = new LongAdder(); final LongAdder borrowed = new LongAdder(); @@ -962,7 +1064,7 @@ index 1ef8abf5..328e63dd 100644 } } diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/XrayRulesSampler.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/XrayRulesSampler.java -index 75977dc0..fae13433 100644 +index 75977dc0..48bdeb0f 100644 --- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/XrayRulesSampler.java +++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/XrayRulesSampler.java @@ -5,42 +5,81 @@ @@ -970,6 +1072,7 @@ index 75977dc0..fae13433 100644 package io.opentelemetry.contrib.awsxray; +import static io.opentelemetry.semconv.HttpAttributes.HTTP_RESPONSE_STATUS_CODE; ++import static io.opentelemetry.semconv.ServiceAttributes.SERVICE_NAME; + +import com.github.benmanes.caffeine.cache.Cache; +import 
com.github.benmanes.caffeine.cache.Caffeine; @@ -989,7 +1092,6 @@ index 75977dc0..fae13433 100644 +import io.opentelemetry.sdk.trace.data.SpanData; import io.opentelemetry.sdk.trace.samplers.Sampler; import io.opentelemetry.sdk.trace.samplers.SamplingResult; -+import io.opentelemetry.semconv.resource.attributes.ResourceAttributes; +import java.nio.charset.StandardCharsets; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; @@ -1048,7 +1150,7 @@ index 75977dc0..fae13433 100644 this( clientId, resource, -@@ -49,8 +88,22 @@ final class XrayRulesSampler implements Sampler { +@@ -49,8 +88,19 @@ final class XrayRulesSampler implements Sampler { rules.stream() // Lower priority value takes precedence so normal ascending sort. .sorted(Comparator.comparingInt(GetSamplingRulesResponse.SamplingRule::getPriority)) @@ -1057,10 +1159,7 @@ index 75977dc0..fae13433 100644 + .map( + rule -> + new SamplingRuleApplier( -+ clientId, -+ rule, -+ resource.getAttribute(ResourceAttributes.SERVICE_NAME), -+ clock)) ++ clientId, rule, resource.getAttribute(SERVICE_NAME), clock)) + .toArray(SamplingRuleApplier[]::new), + createRuleHashMaps(rules), + rules.stream().anyMatch(r -> r.getSamplingRateBoost() != null), @@ -1073,7 +1172,7 @@ index 75977dc0..fae13433 100644 } private XrayRulesSampler( -@@ -58,12 +111,36 @@ final class XrayRulesSampler implements Sampler { +@@ -58,12 +108,36 @@ final class XrayRulesSampler implements Sampler { Resource resource, Clock clock, Sampler fallbackSampler, @@ -1111,7 +1210,7 @@ index 75977dc0..fae13433 100644 } @Override -@@ -74,10 +151,43 @@ final class XrayRulesSampler implements Sampler { +@@ -74,10 +148,43 @@ final class XrayRulesSampler implements Sampler { SpanKind spanKind, Attributes attributes, List parentLinks) { @@ -1157,7 +1256,7 @@ index 75977dc0..fae13433 100644 } } -@@ -96,7 +206,97 @@ final class XrayRulesSampler implements Sampler { +@@ -96,7 +203,97 @@ final class XrayRulesSampler implements Sampler { return "XrayRulesSampler{" + Arrays.toString(ruleAppliers) + "}"; } @@ -1256,7 +1355,7 @@ index 75977dc0..fae13433 100644 return Arrays.stream(ruleAppliers) .map(rule -> rule.snapshot(now)) .filter(Objects::nonNull) -@@ -115,15 +315,16 @@ final class XrayRulesSampler implements Sampler { +@@ -115,15 +312,16 @@ final class XrayRulesSampler implements Sampler { Map ruleTargets, Set requestedTargetRuleNames, Date now) { @@ -1275,7 +1374,7 @@ index 75977dc0..fae13433 100644 } if (requestedTargetRuleNames.contains(rule.getRuleName())) { // In practice X-Ray should return a target for any rule we requested but -@@ -135,6 +336,216 @@ final class XrayRulesSampler implements Sampler { +@@ -135,6 +333,216 @@ final class XrayRulesSampler implements Sampler { return rule; }) .toArray(SamplingRuleApplier[]::new); @@ -1494,7 +1593,7 @@ index 75977dc0..fae13433 100644 } } diff --git a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSamplerTest.java b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSamplerTest.java -index 654a4d57..5af11a25 100644 +index 4e5cd13b..5af11a25 100644 --- a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSamplerTest.java +++ b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSamplerTest.java @@ -7,7 +7,10 @@ package io.opentelemetry.contrib.awsxray; @@ -1518,11 +1617,25 @@ index 654a4d57..5af11a25 100644 import io.opentelemetry.sdk.trace.samplers.Sampler; import io.opentelemetry.sdk.trace.samplers.SamplingDecision; import 
java.io.IOException; -@@ -168,6 +174,32 @@ class AwsXrayRemoteSamplerTest { - } +@@ -169,21 +175,28 @@ class AwsXrayRemoteSamplerTest { } -+ @Test + @Test +- void parentBasedXraySamplerAfterDefaultSampler() { +- rulesResponse.set(RULE_RESPONSE_1); +- try (AwsXrayRemoteSampler samplerWithLongerPollingInterval = +- AwsXrayRemoteSampler.newBuilder(Resource.empty()) +- .setInitialSampler(Sampler.alwaysOn()) +- .setEndpoint(server.httpUri().toString()) +- .setPollingInterval(Duration.ofMillis(5)) +- .build()) { +- await() +- .pollDelay(Duration.ofMillis(10)) +- .untilAsserted( +- () -> { +- assertThat(sampler.getDescription()) +- .startsWith("AwsXrayRemoteSampler{ParentBased{root:XrayRulesSampler{["); +- }); + void setAndResetSpanExporter() { + try (AwsXrayRemoteSampler sampler = AwsXrayRemoteSampler.newBuilder(Resource.empty()).build()) { + // Setting span exporter should only work once @@ -1545,13 +1658,10 @@ index 654a4d57..5af11a25 100644 + sampler.setSpanExporter(mock(SpanExporter.class)); + assertThatCode(() -> sampler.adaptSampling(mock(ReadableSpan.class), mock(SpanData.class))) + .doesNotThrowAnyException(); -+ } -+ } -+ - // https://github.com/open-telemetry/opentelemetry-java-contrib/issues/376 - @Test - void testJitterTruncation() { -@@ -187,6 +219,16 @@ class AwsXrayRemoteSamplerTest { + } + } + +@@ -206,6 +219,16 @@ class AwsXrayRemoteSamplerTest { } } @@ -1569,11 +1679,11 @@ index 654a4d57..5af11a25 100644 return sampler .shouldSample( diff --git a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplierTest.java b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplierTest.java -index 6bb6e82a..6d71711b 100644 +index 920a5ffd..b7c21aa0 100644 --- a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplierTest.java +++ b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplierTest.java -@@ -8,18 +8,25 @@ package io.opentelemetry.contrib.awsxray; - import static io.opentelemetry.semconv.resource.attributes.ResourceAttributes.SERVICE_NAME; +@@ -15,18 +15,27 @@ import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_ + import static io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_HOST_NAME; import static org.assertj.core.api.Assertions.assertThat; import static org.awaitility.Awaitility.await; +import static org.mockito.Mockito.mock; @@ -1583,7 +1693,9 @@ index 6bb6e82a..6d71711b 100644 import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; ++import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanContext; ++import io.opentelemetry.api.trace.SpanId; import io.opentelemetry.api.trace.SpanKind; +import io.opentelemetry.api.trace.TraceFlags; import io.opentelemetry.api.trace.TraceId; @@ -1597,8 +1709,8 @@ index 6bb6e82a..6d71711b 100644 +import io.opentelemetry.sdk.trace.ReadableSpan; import io.opentelemetry.sdk.trace.samplers.SamplingDecision; import io.opentelemetry.sdk.trace.samplers.SamplingResult; - import io.opentelemetry.semconv.resource.attributes.ResourceAttributes; -@@ -28,6 +35,7 @@ import java.io.IOException; + import io.opentelemetry.semconv.HttpAttributes; +@@ -37,6 +46,7 @@ import java.io.IOException; import java.io.UncheckedIOException; import java.time.Duration; import java.time.Instant; @@ -1606,20 +1718,15 @@ index 6bb6e82a..6d71711b 100644 import java.util.Collections; import java.util.Date; import java.util.concurrent.TimeUnit; -@@ 
-41,6 +49,12 @@ class SamplingRuleApplierTest { +@@ -50,6 +60,7 @@ class SamplingRuleApplierTest { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private static final String CLIENT_ID = "test-client-id"; + private static final String TEST_SERVICE_NAME = "test-service-name"; -+ -+ private static final AttributeKey URL_PATH = AttributeKey.stringKey("url.path"); -+ private static final AttributeKey URL_FULL = AttributeKey.stringKey("url.full"); -+ private static final AttributeKey HTTP_REQUEST_METHOD = -+ AttributeKey.stringKey("http.request.method"); @Nested @SuppressWarnings("ClassCanBeStatic") -@@ -48,7 +62,10 @@ class SamplingRuleApplierTest { +@@ -57,7 +68,10 @@ class SamplingRuleApplierTest { private final SamplingRuleApplier applier = new SamplingRuleApplier( @@ -1631,23 +1738,7 @@ index 6bb6e82a..6d71711b 100644 private final Resource resource = Resource.builder() -@@ -68,6 +85,15 @@ class SamplingRuleApplierTest { - .put(AttributeKey.longKey("speed"), 10) - .build(); - -+ private final Attributes newSemCovAttributes = -+ Attributes.builder() -+ .put(HTTP_REQUEST_METHOD, "GET") -+ .put(SemanticAttributes.NET_HOST_NAME, "opentelemetry.io") -+ .put(URL_PATH, "/instrument-me") -+ .put(AttributeKey.stringKey("animal"), "cat") -+ .put(AttributeKey.longKey("speed"), 10) -+ .build(); -+ - // FixedRate set to 1.0 in rule and no reservoir - @Test - void fixedRateAlwaysSample() { -@@ -75,7 +101,8 @@ class SamplingRuleApplierTest { +@@ -91,7 +105,8 @@ class SamplingRuleApplierTest { .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); Date now = new Date(); @@ -1657,7 +1748,7 @@ index 6bb6e82a..6d71711b 100644 assertThat(statistics.getClientId()).isEqualTo(CLIENT_ID); assertThat(statistics.getRuleName()).isEqualTo("Test"); assertThat(statistics.getTimestamp()).isEqualTo(now); -@@ -84,7 +111,7 @@ class SamplingRuleApplierTest { +@@ -100,7 +115,7 @@ class SamplingRuleApplierTest { assertThat(statistics.getBorrowCount()).isEqualTo(0); // Reset @@ -1666,7 +1757,7 @@ index 6bb6e82a..6d71711b 100644 assertThat(statistics.getRequestCount()).isEqualTo(0); assertThat(statistics.getSampledCount()).isEqualTo(0); assertThat(statistics.getBorrowCount()).isEqualTo(0); -@@ -92,7 +119,7 @@ class SamplingRuleApplierTest { +@@ -108,7 +123,7 @@ class SamplingRuleApplierTest { doSample(applier); doSample(applier); now = new Date(); @@ -1675,78 +1766,7 @@ index 6bb6e82a..6d71711b 100644 assertThat(statistics.getClientId()).isEqualTo(CLIENT_ID); assertThat(statistics.getRuleName()).isEqualTo("Test"); assertThat(statistics.getTimestamp()).isEqualTo(now); -@@ -116,6 +143,21 @@ class SamplingRuleApplierTest { - .isTrue(); - } - -+ @Test -+ void matchesURLFullNewSemCov() { -+ assertThat(applier.matches(newSemCovAttributes, resource)).isTrue(); -+ -+ // http.url works too -+ assertThat( -+ applier.matches( -+ attributes.toBuilder() -+ .remove(URL_FULL) -+ .put(URL_FULL, "scheme://host:port/instrument-me") -+ .build(), -+ resource)) -+ .isTrue(); -+ } -+ - @Test - void serviceNameNotMatch() { - assertThat( -@@ -137,6 +179,13 @@ class SamplingRuleApplierTest { - assertThat(applier.matches(attributes, resource)).isFalse(); - } - -+ @Test -+ void methodNewSemCovNotMatch() { -+ Attributes attributes = -+ this.newSemCovAttributes.toBuilder().put(HTTP_REQUEST_METHOD, "POST").build(); -+ assertThat(applier.matches(attributes, resource)).isFalse(); -+ } -+ - @Test - void hostNotMatch() { - // Replacing dot with character makes sure we're not accidentally treating dot as regex -@@ -178,6 
+227,34 @@ class SamplingRuleApplierTest { - assertThat(applier.matches(attributes, resource)).isFalse(); - } - -+ @Test -+ void pathNewSemCovNotMatch() { -+ Attributes attributes = -+ this.newSemCovAttributes.toBuilder().put(URL_PATH, "/instrument-you").build(); -+ assertThat(applier.matches(attributes, resource)).isFalse(); -+ attributes = -+ this.newSemCovAttributes.toBuilder() -+ .remove(URL_PATH) -+ .put(URL_FULL, "scheme://host:port/instrument-you") -+ .build(); -+ assertThat(applier.matches(attributes, resource)).isFalse(); -+ attributes = -+ this.newSemCovAttributes.toBuilder() -+ .remove(URL_PATH) -+ .put(URL_FULL, "scheme://host:port") -+ .build(); -+ assertThat(applier.matches(attributes, resource)).isFalse(); -+ -+ // Correct path, but we ignore anyways since the URL is malformed per spec, scheme is always -+ // present. -+ attributes = -+ this.newSemCovAttributes.toBuilder() -+ .remove(URL_PATH) -+ .put(URL_FULL, "host:port/instrument-me") -+ .build(); -+ assertThat(applier.matches(attributes, resource)).isFalse(); -+ } -+ - @Test - void attributeNotMatch() { - Attributes attributes = -@@ -223,7 +300,10 @@ class SamplingRuleApplierTest { +@@ -283,7 +298,10 @@ class SamplingRuleApplierTest { private final SamplingRuleApplier applier = new SamplingRuleApplier( @@ -1758,22 +1778,7 @@ index 6bb6e82a..6d71711b 100644 private final Resource resource = Resource.builder() -@@ -243,13 +323,23 @@ class SamplingRuleApplierTest { - .put(AttributeKey.longKey("speed"), 10) - .build(); - -+ private final Attributes newSemCovAttributes = -+ Attributes.builder() -+ .put(HTTP_REQUEST_METHOD, "GET") -+ .put(SemanticAttributes.NET_HOST_NAME, "opentelemetry.io") -+ .put(URL_PATH, "/instrument-me?foo=bar&cat=meow") -+ .put(AttributeKey.stringKey("animal"), "cat") -+ .put(AttributeKey.longKey("speed"), 10) -+ .build(); -+ - // FixedRate set to 0.0 in rule and no reservoir - @Test - void fixedRateNeverSample() { +@@ -316,7 +334,8 @@ class SamplingRuleApplierTest { assertThat(doSample(applier)).isEqualTo(SamplingResult.create(SamplingDecision.DROP)); Date now = new Date(); @@ -1783,7 +1788,7 @@ index 6bb6e82a..6d71711b 100644 assertThat(statistics.getClientId()).isEqualTo(CLIENT_ID); assertThat(statistics.getRuleName()).isEqualTo("Test"); assertThat(statistics.getTimestamp()).isEqualTo(now); -@@ -258,7 +348,7 @@ class SamplingRuleApplierTest { +@@ -325,7 +344,7 @@ class SamplingRuleApplierTest { assertThat(statistics.getBorrowCount()).isEqualTo(0); // Reset @@ -1792,7 +1797,7 @@ index 6bb6e82a..6d71711b 100644 assertThat(statistics.getRequestCount()).isEqualTo(0); assertThat(statistics.getSampledCount()).isEqualTo(0); assertThat(statistics.getBorrowCount()).isEqualTo(0); -@@ -266,7 +356,7 @@ class SamplingRuleApplierTest { +@@ -333,7 +352,7 @@ class SamplingRuleApplierTest { doSample(applier); doSample(applier); now = new Date(); @@ -1801,64 +1806,7 @@ index 6bb6e82a..6d71711b 100644 assertThat(statistics.getClientId()).isEqualTo(CLIENT_ID); assertThat(statistics.getRuleName()).isEqualTo("Test"); assertThat(statistics.getTimestamp()).isEqualTo(now); -@@ -329,6 +419,26 @@ class SamplingRuleApplierTest { - assertThat(applier.matches(attributes, resource)).isFalse(); - } - -+ @Test -+ void newSemCovMethodMatches() { -+ Attributes attributes = -+ this.newSemCovAttributes.toBuilder().put(HTTP_REQUEST_METHOD, "BADGETGOOD").build(); -+ assertThat(applier.matches(attributes, resource)).isTrue(); -+ attributes = newSemCovAttributes.toBuilder().put(HTTP_REQUEST_METHOD, "BADGET").build(); -+ 
assertThat(applier.matches(attributes, resource)).isTrue(); -+ attributes = newSemCovAttributes.toBuilder().put(HTTP_REQUEST_METHOD, "GETGET").build(); -+ assertThat(applier.matches(attributes, resource)).isTrue(); -+ } -+ -+ @Test -+ void newSemCovMethodNotMatch() { -+ Attributes attributes = -+ newSemCovAttributes.toBuilder().put(HTTP_REQUEST_METHOD, "POST").build(); -+ assertThat(applier.matches(attributes, resource)).isFalse(); -+ attributes = removeAttribute(newSemCovAttributes, HTTP_REQUEST_METHOD); -+ assertThat(applier.matches(attributes, resource)).isFalse(); -+ } -+ - @Test - void hostMatches() { - Attributes attributes = -@@ -410,6 +520,29 @@ class SamplingRuleApplierTest { - assertThat(applier.matches(attributes, resource)).isFalse(); - } - -+ @Test -+ void pathNewSemCovMatches() { -+ Attributes attributes = -+ newSemCovAttributes.toBuilder().put(URL_PATH, "/instrument-me?foo=bar&cat=").build(); -+ assertThat(applier.matches(attributes, resource)).isTrue(); -+ // Deceptive question mark, it's actually a wildcard :-) -+ attributes = -+ newSemCovAttributes.toBuilder().put(URL_PATH, "/instrument-meafoo=bar&cat=").build(); -+ assertThat(applier.matches(attributes, resource)).isTrue(); -+ } -+ -+ @Test -+ void pathNewSemCovNotMatch() { -+ Attributes attributes = -+ newSemCovAttributes.toBuilder().put(URL_PATH, "/instrument-mea?foo=bar&cat=").build(); -+ assertThat(applier.matches(attributes, resource)).isFalse(); -+ attributes = -+ newSemCovAttributes.toBuilder().put(URL_PATH, "foo/instrument-meafoo=bar&cat=").build(); -+ assertThat(applier.matches(attributes, resource)).isFalse(); -+ attributes = removeAttribute(newSemCovAttributes, URL_PATH); -+ assertThat(applier.matches(attributes, resource)).isFalse(); -+ } -+ - @Test - void attributeMatches() { - Attributes attributes = -@@ -493,7 +626,10 @@ class SamplingRuleApplierTest { +@@ -626,7 +645,10 @@ class SamplingRuleApplierTest { private final SamplingRuleApplier applier = new SamplingRuleApplier( @@ -1870,7 +1818,7 @@ index 6bb6e82a..6d71711b 100644 private final Resource resource = Resource.builder() -@@ -553,7 +689,10 @@ class SamplingRuleApplierTest { +@@ -677,7 +699,10 @@ class SamplingRuleApplierTest { void borrowing() { SamplingRuleApplier applier = new SamplingRuleApplier( @@ -1882,7 +1830,7 @@ index 6bb6e82a..6d71711b 100644 // Borrow assertThat(doSample(applier)) -@@ -564,7 +703,8 @@ class SamplingRuleApplierTest { +@@ -688,7 +713,8 @@ class SamplingRuleApplierTest { assertThat(doSample(applier)).isEqualTo(SamplingResult.create(SamplingDecision.DROP)); Date now = new Date(); @@ -1892,7 +1840,7 @@ index 6bb6e82a..6d71711b 100644 assertThat(statistics.getClientId()).isEqualTo(CLIENT_ID); assertThat(statistics.getRuleName()).isEqualTo("Test"); assertThat(statistics.getTimestamp()).isEqualTo(now); -@@ -573,7 +713,7 @@ class SamplingRuleApplierTest { +@@ -697,7 +723,7 @@ class SamplingRuleApplierTest { assertThat(statistics.getBorrowCount()).isEqualTo(1); // Reset @@ -1901,7 +1849,7 @@ index 6bb6e82a..6d71711b 100644 assertThat(statistics.getRequestCount()).isEqualTo(0); assertThat(statistics.getSampledCount()).isEqualTo(0); assertThat(statistics.getBorrowCount()).isEqualTo(0); -@@ -589,7 +729,7 @@ class SamplingRuleApplierTest { +@@ -713,7 +739,7 @@ class SamplingRuleApplierTest { }); now = new Date(); @@ -1910,7 +1858,50 @@ index 6bb6e82a..6d71711b 100644 assertThat(statistics.getClientId()).isEqualTo(CLIENT_ID); assertThat(statistics.getRuleName()).isEqualTo("Test"); assertThat(statistics.getTimestamp()).isEqualTo(now); 
-@@ -603,7 +743,7 @@ class SamplingRuleApplierTest { +@@ -722,12 +748,50 @@ class SamplingRuleApplierTest { + assertThat(statistics.getBorrowCount()).isEqualTo(1); + } + ++ @Test ++ void generateStatistics() { ++ SamplingRuleApplier applier = ++ new SamplingRuleApplier( ++ CLIENT_ID, ++ readSamplingRule("/sampling-rule-sample-all.json"), ++ TEST_SERVICE_NAME, ++ Clock.getDefault()); ++ ++ // Send a span for which the sampling decision hasn't been made yet ++ assertThat(doSample(applier)) ++ .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); ++ ++ // Send spans for which the sampling decision has already been made ++ // Send in different amounts to ensure statistics are generated for correct calls ++ assertThat(doSampleSpanWithValidContext(applier, /* isSampled= */ true)) ++ .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); ++ assertThat(doSampleSpanWithValidContext(applier, /* isSampled= */ true)) ++ .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); ++ assertThat(doSampleSpanWithValidContext(applier, /* isSampled= */ false)) ++ .isEqualTo(SamplingResult.create(SamplingDecision.DROP)); ++ assertThat(doSampleSpanWithValidContext(applier, /* isSampled= */ false)) ++ .isEqualTo(SamplingResult.create(SamplingDecision.DROP)); ++ assertThat(doSampleSpanWithValidContext(applier, /* isSampled= */ false)) ++ .isEqualTo(SamplingResult.create(SamplingDecision.DROP)); ++ ++ // Verify outgoing statistics ++ Date now = new Date(); ++ GetSamplingTargetsRequest.SamplingStatisticsDocument statistics = ++ applier.snapshot(now).getStatisticsDocument(); ++ assertThat(statistics.getClientId()).isEqualTo(CLIENT_ID); ++ assertThat(statistics.getRuleName()).isEqualTo("Test"); ++ assertThat(statistics.getTimestamp()).isEqualTo(now); ++ assertThat(statistics.getRequestCount()).isEqualTo(1); ++ assertThat(statistics.getSampledCount()).isEqualTo(1); ++ assertThat(statistics.getBorrowCount()).isEqualTo(0); ++ } ++ + @Test + void ruleWithTarget() { TestClock clock = TestClock.create(); SamplingRuleApplier applier = new SamplingRuleApplier( @@ -1919,7 +1910,7 @@ index 6bb6e82a..6d71711b 100644 // No target yet, borrows from reservoir every second. assertThat(doSample(applier)) .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); -@@ -622,8 +762,8 @@ class SamplingRuleApplierTest { +@@ -746,8 +810,8 @@ class SamplingRuleApplierTest { // Got a target! SamplingTargetDocument target = @@ -1930,7 +1921,7 @@ index 6bb6e82a..6d71711b 100644 // Statistics not expired yet assertThat(applier.snapshot(Date.from(now))).isNull(); -@@ -662,7 +802,7 @@ class SamplingRuleApplierTest { +@@ -786,7 +850,7 @@ class SamplingRuleApplierTest { TestClock clock = TestClock.create(); SamplingRuleApplier applier = new SamplingRuleApplier( @@ -1939,7 +1930,7 @@ index 6bb6e82a..6d71711b 100644 // No target yet, borrows from reservoir every second. assertThat(doSample(applier)) .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); -@@ -680,8 +820,8 @@ class SamplingRuleApplierTest { +@@ -804,8 +868,8 @@ class SamplingRuleApplierTest { assertThat(applier.snapshot(Date.from(now.plus(Duration.ofMinutes(30))))).isNotNull(); // Got a target! 
@@ -1950,7 +1941,7 @@ index 6bb6e82a..6d71711b 100644 // No reservoir, always use fixed rate (drop) assertThat(doSample(applier)).isEqualTo(SamplingResult.create(SamplingDecision.DROP)); assertThat(doSample(applier)).isEqualTo(SamplingResult.create(SamplingDecision.DROP)); -@@ -691,12 +831,105 @@ class SamplingRuleApplierTest { +@@ -815,12 +879,105 @@ class SamplingRuleApplierTest { assertThat(applier.snapshot(Date.from(now))).isNotNull(); } @@ -2057,7 +2048,7 @@ index 6bb6e82a..6d71711b 100644 Instant now = Instant.ofEpochSecond(0, clock.now()); assertThat(applier.snapshot(Date.from(now))).isNotNull(); -@@ -715,6 +948,71 @@ class SamplingRuleApplierTest { +@@ -839,6 +996,71 @@ class SamplingRuleApplierTest { assertThat(doSample(applier)).isEqualTo(SamplingResult.create(SamplingDecision.DROP)); } @@ -2129,6 +2120,35 @@ index 6bb6e82a..6d71711b 100644 private static SamplingResult doSample(SamplingRuleApplier applier) { return applier.shouldSample( Context.current(), +@@ -849,6 +1071,28 @@ class SamplingRuleApplierTest { + Collections.emptyList()); + } + ++ private static SamplingResult doSampleSpanWithValidContext( ++ SamplingRuleApplier applier, boolean isSampled) { ++ String traceId = TraceId.fromLongs(1, 2); ++ Context parentContext = ++ Context.root() ++ .with( ++ Span.wrap( ++ SpanContext.create( ++ traceId, ++ SpanId.fromLong(1L), ++ isSampled ? TraceFlags.getSampled() : TraceFlags.getDefault(), ++ TraceState.getDefault()))); ++ ++ return applier.shouldSample( ++ parentContext, ++ traceId, ++ SpanId.fromLong(2L), ++ SpanKind.CLIENT, ++ Attributes.empty(), ++ Collections.emptyList()); ++ } ++ + private static GetSamplingRulesResponse.SamplingRule readSamplingRule(String resourcePath) { + try { + return OBJECT_MAPPER.readValue( diff --git a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/XrayRulesSamplerTest.java b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/XrayRulesSamplerTest.java index 1ca8df34..14ebdbda 100644 --- a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/XrayRulesSamplerTest.java @@ -3318,14 +3338,35 @@ index 00000000..32752d5e + "speed": "0" + } +} +diff --git a/aws-xray/src/test/resources/sampling-rule-sample-all.json b/aws-xray/src/test/resources/sampling-rule-sample-all.json +new file mode 100644 +index 00000000..4ba3013a +--- /dev/null ++++ b/aws-xray/src/test/resources/sampling-rule-sample-all.json +@@ -0,0 +1,15 @@ ++{ ++ "RuleName": "Test", ++ "RuleARN": "arn:aws:xray:us-east-1:595986152929:sampling-rule/Test", ++ "ResourceARN": "arn:aws:xray:us-east-1:595986152929:my-service", ++ "Priority": 1, ++ "FixedRate": 1.0, ++ "ReservoirSize": 0, ++ "ServiceName": "*", ++ "ServiceType": "*", ++ "Host": "*", ++ "HTTPMethod": "*", ++ "URLPath": "*", ++ "Version": 1, ++ "Attributes": {} ++} diff --git a/disk-buffering/build.gradle.kts b/disk-buffering/build.gradle.kts -index 041d2e91..e3d60f46 100644 +index 8250c1bd..74a1a24c 100644 --- a/disk-buffering/build.gradle.kts +++ b/disk-buffering/build.gradle.kts -@@ -70,6 +70,10 @@ tasks.named("shadowJar") { +@@ -77,6 +77,10 @@ tasks.named("shadowJar") { mustRunAfter("jar") } - + +tasks.withType().configureEach { + dependsOn("shadowJar") +} @@ -3333,18 +3374,94 @@ index 041d2e91..e3d60f46 100644 // The javadoc from wire's generated classes has errors that make the task that generates the "javadoc" artifact to fail. This // makes the javadoc task to ignore those generated classes. 
tasks.withType(Javadoc::class.java) { +diff --git a/jmx-metrics/src/integrationTest/java/io/opentelemetry/contrib/jmxmetrics/target_systems/KafkaIntegrationTest.java b/jmx-metrics/src/integrationTest/java/io/opentelemetry/contrib/jmxmetrics/target_systems/KafkaIntegrationTest.java +index 4c2c9293..4dddd975 100644 +--- a/jmx-metrics/src/integrationTest/java/io/opentelemetry/contrib/jmxmetrics/target_systems/KafkaIntegrationTest.java ++++ b/jmx-metrics/src/integrationTest/java/io/opentelemetry/contrib/jmxmetrics/target_systems/KafkaIntegrationTest.java +@@ -44,7 +44,7 @@ abstract class KafkaIntegrationTest extends AbstractIntegrationTest { + + @Container + GenericContainer kafka = +- new GenericContainer<>("bitnami/kafka:2.8.1") ++ new GenericContainer<>("bitnamilegacy/kafka:2.8.1") + .withNetwork(Network.SHARED) + .withEnv("KAFKA_CFG_ZOOKEEPER_CONNECT", "zookeeper:2181") + .withEnv("ALLOW_PLAINTEXT_LISTENER", "yes") +@@ -80,7 +80,7 @@ abstract class KafkaIntegrationTest extends AbstractIntegrationTest { + }; + + protected GenericContainer kafkaProducerContainer() { +- return new GenericContainer<>("bitnami/kafka:2.8.1") ++ return new GenericContainer<>("bitnamilegacy/kafka:2.8.1") + .withNetwork(Network.SHARED) + .withEnv("KAFKA_CFG_ZOOKEEPER_CONNECT", "zookeeper:2181") + .withEnv("ALLOW_PLAINTEXT_LISTENER", "yes") +@@ -207,7 +207,7 @@ abstract class KafkaIntegrationTest extends AbstractIntegrationTest { + + @Container + GenericContainer consumer = +- new GenericContainer<>("bitnami/kafka:2.8.1") ++ new GenericContainer<>("bitnamilegacy/kafka:2.8.1") + .withNetwork(Network.SHARED) + .withEnv("KAFKA_CFG_ZOOKEEPER_CONNECT", "zookeeper:2181") + .withEnv("ALLOW_PLAINTEXT_LISTENER", "yes") +diff --git a/jmx-scraper/src/integrationTest/java/io/opentelemetry/contrib/jmxscraper/target_systems/kafka/KafkaContainerFactory.java b/jmx-scraper/src/integrationTest/java/io/opentelemetry/contrib/jmxscraper/target_systems/kafka/KafkaContainerFactory.java +index 8eb9432a..e46ed07b 100644 +--- a/jmx-scraper/src/integrationTest/java/io/opentelemetry/contrib/jmxscraper/target_systems/kafka/KafkaContainerFactory.java ++++ b/jmx-scraper/src/integrationTest/java/io/opentelemetry/contrib/jmxscraper/target_systems/kafka/KafkaContainerFactory.java +@@ -12,7 +12,7 @@ import org.testcontainers.containers.wait.strategy.Wait; + public class KafkaContainerFactory { + private static final int KAFKA_PORT = 9092; + private static final String KAFKA_BROKER = "kafka:" + KAFKA_PORT; +- private static final String KAFKA_DOCKER_IMAGE = "bitnami/kafka:2.8.1"; ++ private static final String KAFKA_DOCKER_IMAGE = "bitnamilegacy/kafka:2.8.1"; + + private KafkaContainerFactory() {} + +diff --git a/opamp-client/build.gradle.kts b/opamp-client/build.gradle.kts +index e41d1fff..84a1d559 100644 +--- a/opamp-client/build.gradle.kts ++++ b/opamp-client/build.gradle.kts +@@ -1,6 +1,4 @@ + import de.undercouch.gradle.tasks.download.DownloadExtension +-import java.net.HttpURLConnection +-import java.net.URL + + plugins { + id("otel.java-conventions") +@@ -50,19 +48,7 @@ abstract class DownloadOpampProtos @Inject constructor( + + @TaskAction + fun execute() { +- // Get the latest release tag by following the redirect from GitHub's latest release URL +- val latestReleaseUrl = "https://github.com/open-telemetry/opamp-spec/releases/latest" +- val connection = URL(latestReleaseUrl).openConnection() as HttpURLConnection +- connection.instanceFollowRedirects = false +- connection.requestMethod = "HEAD" +- +- val redirectLocation = 
connection.getHeaderField("Location") +- connection.disconnect() +- +- // Extract tag from URL like: https://github.com/open-telemetry/opamp-spec/releases/tag/v0.12.0 +- val latestTag = redirectLocation.substringAfterLast("/") +- // Download the source code for the latest release +- val zipUrl = "https://github.com/open-telemetry/opamp-spec/zipball/$latestTag" ++ val zipUrl = "https://github.com/open-telemetry/opamp-spec/zipball/v0.14.0" + + download.run { + src(zipUrl) diff --git a/version.gradle.kts b/version.gradle.kts -index acefcee9..329b524f 100644 +index f8358006..1f7c517f 100644 --- a/version.gradle.kts +++ b/version.gradle.kts @@ -1,5 +1,5 @@ --val stableVersion = "1.39.0" --val alphaVersion = "1.39.0-alpha" -+val stableVersion = "1.39.0-adot1" -+val alphaVersion = "1.39.0-alpha-adot1" - +-val stableVersion = "1.48.0" +-val alphaVersion = "1.48.0-alpha" ++val stableVersion = "1.48.0-adot1" ++val alphaVersion = "1.48.0-alpha-adot1" + allprojects { if (findProperty("otel.stable") != "true") { --- +-- 2.45.1 diff --git a/.github/patches/opentelemetry-java-instrumentation.patch b/.github/patches/opentelemetry-java-instrumentation.patch index 05f7b3077d..d4b26b1431 100644 --- a/.github/patches/opentelemetry-java-instrumentation.patch +++ b/.github/patches/opentelemetry-java-instrumentation.patch @@ -1,28 +1,41 @@ diff --git a/dependencyManagement/build.gradle.kts b/dependencyManagement/build.gradle.kts -index d020848503..88f0a60ac6 100644 +index 98def282f8..65fd6a8a13 100644 --- a/dependencyManagement/build.gradle.kts +++ b/dependencyManagement/build.gradle.kts -@@ -100,7 +100,7 @@ val DEPENDENCIES = listOf( - "commons-validator:commons-validator:1.9.0", +@@ -104,7 +104,7 @@ val DEPENDENCIES = listOf( "io.netty:netty:3.10.6.Final", + "io.opentelemetry.contrib:opentelemetry-azure-resources:${otelContribVersion}", "io.opentelemetry.contrib:opentelemetry-aws-resources:${otelContribVersion}", - "io.opentelemetry.contrib:opentelemetry-aws-xray-propagator:${otelContribVersion}", -+ "io.opentelemetry.contrib:opentelemetry-aws-xray-propagator:1.39.0-alpha-adot1", ++ "io.opentelemetry.contrib:opentelemetry-aws-xray-propagator:1.48.0-alpha-adot1", "io.opentelemetry.contrib:opentelemetry-gcp-resources:${otelContribVersion}", + "io.opentelemetry.contrib:opentelemetry-cloudfoundry-resources:${otelContribVersion}", "io.opentelemetry.contrib:opentelemetry-baggage-processor:${otelContribVersion}", - "io.opentelemetry.proto:opentelemetry-proto:1.4.0-alpha", +diff --git a/instrumentation/jmx-metrics/library/src/test/java/io/opentelemetry/instrumentation/jmx/rules/TomcatIntegrationTest.java b/instrumentation/jmx-metrics/library/src/test/java/io/opentelemetry/instrumentation/jmx/rules/TomcatIntegrationTest.java +index 1234567890..abcdef1234 100644 +--- a/instrumentation/jmx-metrics/library/src/test/java/io/opentelemetry/instrumentation/jmx/rules/TomcatIntegrationTest.java ++++ b/instrumentation/jmx-metrics/library/src/test/java/io/opentelemetry/instrumentation/jmx/rules/TomcatIntegrationTest.java +@@ -20,8 +20,8 @@ public class TomcatIntegrationTest extends TargetSystemTest { + + @ParameterizedTest + @CsvSource({ ++ // TODO: Remove this patch after we no longer depend on 2.18.1 of opentelemetry-java-instrumentation. 
+ "tomcat:10.0, https://tomcat.apache.org/tomcat-10.0-doc/appdev/sample/sample.war", +- "tomcat:9.0, https://tomcat.apache.org/tomcat-9.0-doc/appdev/sample/sample.war" + }) + void testCollectedMetrics(String dockerImageName, String sampleWebApplicationUrl) + throws Exception { diff --git a/version.gradle.kts b/version.gradle.kts -index a1cae43b4b..c1520e9947 100644 +index 023d04703c..ec9690086c 100644 --- a/version.gradle.kts +++ b/version.gradle.kts @@ -1,5 +1,5 @@ --val stableVersion = "2.11.0" --val alphaVersion = "2.11.0-alpha" -+val stableVersion = "2.11.0-adot1" -+val alphaVersion = "2.11.0-adot1-alpha" - +-val stableVersion = "2.18.1" +-val alphaVersion = "2.18.1-alpha" ++val stableVersion = "2.18.1-adot1" ++val alphaVersion = "2.18.1-adot1-alpha" + allprojects { if (findProperty("otel.stable") != "true") { --- +-- 2.45.1 - diff --git a/.github/patches/versions b/.github/patches/versions index be4e3bbcb6..b1b741c455 100644 --- a/.github/patches/versions +++ b/.github/patches/versions @@ -1,2 +1,2 @@ -OTEL_JAVA_INSTRUMENTATION_VERSION=v2.11.0 -OTEL_JAVA_CONTRIB_VERSION=v1.39.0 \ No newline at end of file +OTEL_JAVA_INSTRUMENTATION_VERSION=v2.18.1 +OTEL_JAVA_CONTRIB_VERSION=v1.48.0 \ No newline at end of file diff --git a/.github/workflows/application-signals-e2e-test.yml b/.github/workflows/application-signals-e2e-test.yml index e436bc82df..ca152b37b0 100644 --- a/.github/workflows/application-signals-e2e-test.yml +++ b/.github/workflows/application-signals-e2e-test.yml @@ -26,12 +26,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 with: role-to-assume: arn:aws:iam::${{ secrets.APPLICATION_SIGNALS_E2E_TEST_ACCOUNT_ID }}:role/${{ secrets.APPLICATION_SIGNALS_E2E_TEST_ROLE_NAME }} aws-region: us-east-1 - - uses: actions/download-artifact@v4 + - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 #5.0.0 with: name: aws-opentelemetry-agent.jar @@ -99,6 +99,13 @@ jobs: secrets: inherit with: caller-workflow-name: 'main-build' + + java-ec2-adaptive-sampling-test: + needs: [ upload-main-build ] + uses: aws-observability/aws-application-signals-test-framework/.github/workflows/java-ec2-adaptive-sampling-test.yml@main + secrets: inherit + with: + caller-workflow-name: 'main-build' # # DOCKER DISTRIBUTION LANGUAGE VERSION COVERAGE # DEFAULT SETTING: {Java Version}, EKS, AMD64, AL2 diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 024cef746b..1ff9f43d9a 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -23,21 +23,21 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0 # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL
-        uses: github/codeql-action/init@v3
+        uses: github/codeql-action/init@16df4fbc19aea13d921737861d6c622bf3cefe23 #v3.30.3
         with:
           languages: java
 
-      - uses: actions/setup-java@v4
+      - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0
         with:
-          java-version: 17
+          java-version-file: .java-version
           distribution: temurin
 
       - name: Cache local Maven repository
-        uses: actions/cache@v3
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4
         with:
           path: |
             ~/.m2/repository/io/opentelemetry/
@@ -50,12 +50,47 @@ jobs:
           gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
           gpg_password: ${{ secrets.GPG_PASSPHRASE }}
 
-      - uses: gradle/wrapper-validation-action@v1
+      - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3
 
       - name: Manually build to avoid autobuild failures
-        uses: gradle/gradle-build-action@v3
+        uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0
         with:
           arguments: build
 
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3
+        uses: github/codeql-action/analyze@16df4fbc19aea13d921737861d6c622bf3cefe23 #v3.30.3
+
+  all-codeql-checks-pass:
+    runs-on: ubuntu-latest
+    needs: [analyze]
+    if: always()
+    steps:
+      - name: Checkout to get workflow file
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0
+
+      - name: Check all jobs succeeded and none missing
+        run: |
+          # Check if all needed jobs succeeded
+          results='${{ toJSON(needs) }}'
+          if echo "$results" | jq -r '.[] | .result' | grep -v success; then
+            echo "Some jobs failed"
+            exit 1
+          fi
+
+          # Extract all job names from workflow (excluding this gate job)
+          all_jobs=$(yq eval '.jobs | keys | .[]' .github/workflows/codeql-analysis.yml | grep -v "all-codeql-checks-pass" | sort)
+
+          # Extract job names from needs array
+          needed_jobs='${{ toJSON(needs) }}'
+          needs_list=$(echo "$needed_jobs" | jq -r 'keys[]' | sort)
+
+          # Check if any jobs are missing from needs
+          missing_jobs=$(comm -23 <(echo "$all_jobs") <(echo "$needs_list"))
+          if [ -n "$missing_jobs" ]; then
+            echo "ERROR: Jobs missing from needs array in all-codeql-checks-pass:"
+            echo "$missing_jobs"
+            echo "Please add these jobs to the needs array of all-codeql-checks-pass"
+            exit 1
+          fi
+
+          echo "All CodeQL checks passed and no jobs missing from gate!"
diff --git a/.github/workflows/owasp.yml b/.github/workflows/daily-scan.yml similarity index 81% rename from .github/workflows/owasp.yml rename to .github/workflows/daily-scan.yml index 54ce812326..fc801cee62 100644 --- a/.github/workflows/owasp.yml +++ b/.github/workflows/daily-scan.yml @@ -8,8 +8,8 @@ name: Daily scan on: - schedule: - - cron: '0 18 * * *' # scheduled to run at 18:00 UTC every day + schedule: # scheduled to run every 6 hours + - cron: '10 */6 * * *' # “At minute 10 past every 6th hour.” workflow_dispatch: # be able to run the workflow on demand env: @@ -24,24 +24,24 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repo for dependency scan - uses: actions/checkout@v4 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0 with: fetch-depth: 0 - name: Set up Java for dependency scan - uses: actions/setup-java@v4 + uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: - java-version: 17 + java-version-file: .java-version distribution: 'temurin' - name: Configure AWS credentials for dependency scan - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 with: role-to-assume: ${{ secrets.SECRET_MANAGER_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Get NVD API key for dependency scan - uses: aws-actions/aws-secretsmanager-get-secrets@v1 + uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802 #v2.0.10 id: nvd_api_key with: secret-ids: ${{ secrets.NVD_API_KEY_SECRET_ARN }} @@ -51,7 +51,7 @@ jobs: uses: ./.github/actions/patch-dependencies - name: Build JAR - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 with: arguments: assemble -PlocalDocker=true @@ -76,6 +76,17 @@ jobs: if: ${{ steps.dep_scan.outcome != 'success' }} run: less dependency-check-report.html + - name: Configure AWS credentials for image scan + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 + with: + role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} + + - name: Login to Public ECR + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 + with: + registry: public.ecr.aws + - name: Perform high image scan on v1 if: always() id: high_scan_v1 @@ -83,6 +94,7 @@ jobs: with: image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v1.33.0" severity: 'CRITICAL,HIGH' + logout: 'false' - name: Perform low image scan on v1 if: always() @@ -91,26 +103,29 @@ jobs: with: image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v1.33.0" severity: 'MEDIUM,LOW,UNKNOWN' + logout: 'false' - name: Perform high image scan on v2 if: always() id: high_scan_v2 uses: ./.github/actions/image_scan with: - image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.1" + image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.5" severity: 'CRITICAL,HIGH' + logout: 'false' - name: Perform low image scan on v2 if: always() id: low_scan_v2 uses: ./.github/actions/image_scan with: - image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.1" + image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.5" severity: 'MEDIUM,LOW,UNKNOWN' + logout: 'false' - name: Configure AWS Credentials for emitting metrics if: always() - uses: 
aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 with: role-to-assume: ${{ secrets.METRICS_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} diff --git a/.github/workflows/docker-build-corretto-slim.yml b/.github/workflows/docker-build-corretto-slim.yml index ca7528e241..6c61ffdd3a 100644 --- a/.github/workflows/docker-build-corretto-slim.yml +++ b/.github/workflows/docker-build-corretto-slim.yml @@ -19,24 +19,24 @@ jobs: build-corretto: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - uses: gradle/wrapper-validation-action@v1 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws - name: Set up QEMU - uses: docker/setup-qemu-action@v3 + uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 #3.6.0 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 #v3.11.1 - name: Build docker image - uses: docker/build-push-action@v5 + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 #6.18.0 with: push: true context: scripts/docker/corretto-slim diff --git a/.github/workflows/docker-build-smoke-tests-fake-backend.yml b/.github/workflows/docker-build-smoke-tests-fake-backend.yml index 7bcb06a6fe..6226dbac5d 100644 --- a/.github/workflows/docker-build-smoke-tests-fake-backend.yml +++ b/.github/workflows/docker-build-smoke-tests-fake-backend.yml @@ -20,14 +20,14 @@ jobs: build-docker: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - uses: actions/setup-java@v4 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: - java-version: 17 + java-version-file: .java-version distribution: 'temurin' # cache local patch outputs - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -38,18 +38,18 @@ jobs: with: gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} gpg_password: ${{ secrets.GPG_PASSPHRASE }} - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws - name: Build and push docker image - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 with: arguments: :smoke-tests:fakebackend:jib diff --git 
a/.github/workflows/e2e-tests-app-with-java-agent.yml b/.github/workflows/e2e-tests-app-with-java-agent.yml index d09283cb8f..0fc0045c62 100644 --- a/.github/workflows/e2e-tests-app-with-java-agent.yml +++ b/.github/workflows/e2e-tests-app-with-java-agent.yml @@ -25,19 +25,19 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Java Instrumentation repository - uses: actions/checkout@v4 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: - java-version: 17 + java-version-file: .java-version distribution: temurin # cache local patch outputs - name: Cache local Maven repository id: cache-local-maven-repo - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -51,27 +51,27 @@ jobs: gpg_password: ${{ secrets.GPG_PASSPHRASE }} - name: Validate the checksums of Gradle Wrapper - uses: gradle/wrapper-validation-action@v1 + uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws - name: Build and push agent and testing docker images with Gradle - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 with: arguments: jib env: COMMIT_HASH: ${{ inputs.image_tag }} - - uses: codecov/codecov-action@v3 + - uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 #v5.5.1 test_Spring_App_With_Java_Agent: name: Test Spring App with AWS OTel Java agent @@ -79,19 +79,19 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: - java-version: 17 + java-version-file: .java-version distribution: 'temurin' - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws @@ -110,19 +110,19 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: - java-version: 17 + java-version-file: .java-version distribution: 'temurin' - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: 
docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws @@ -141,19 +141,19 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: - java-version: 17 + java-version-file: .java-version distribution: 'temurin' - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws diff --git a/.github/workflows/e2e-tests-with-operator.yml b/.github/workflows/e2e-tests-with-operator.yml index ffacf0c74c..3c4ebe301a 100644 --- a/.github/workflows/e2e-tests-with-operator.yml +++ b/.github/workflows/e2e-tests-with-operator.yml @@ -34,19 +34,19 @@ jobs: build-sample-app: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: - java-version: 17 + java-version-file: .java-version distribution: temurin # cache local patch outputs - name: Cache local Maven repository id: cache-local-maven-repo - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -60,20 +60,21 @@ jobs: gpg_password: ${{ secrets.GPG_PASSPHRASE }} - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws - name: Build and push Sample-Apps without Auto-Instrumentation Agent - uses: gradle/gradle-build-action@v3 - with: - arguments: jibBuildWithoutAgent + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 + + - name: Build and push Sample-Apps with Auto-Instrumentation Agent + run: jibBuildWithoutAgent env: COMMIT_HASH: ${{ inputs.image_tag }} @@ -84,20 +85,20 @@ jobs: test-case-batch-value: ${{ steps.set-batches.outputs.batch-values }} steps: - name: Checkout Testing Framework repository - uses: actions/checkout@v4 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: repository: ${{ env.TESTING_FRAMEWORK_REPO }} path: testing-framework ref: ${{ inputs.test_ref }} - name: Checkout Java Instrumentation repository - uses: actions/checkout@v4 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 path: aws-otel-java-instrumentation - name: Set up Go 1.x - uses: actions/setup-go@v5 + uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 #v6.0.0 with: go-version: '~1.18.9' @@ -126,24 +127,24 @@ jobs: steps: # required for versioning - name: Checkout Java Instrumentation repository - uses: actions/checkout@v4 + uses: 
actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 path: aws-otel-java-instrumentation - name: Set up JDK 11 - uses: actions/setup-java@v4 + uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: distribution: 'zulu' java-version: '11' - name: Set up terraform - uses: hashicorp/setup-terraform@v3 + uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd #v3.1.2 with: terraform_version: "~1.5" - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.JAVA_INSTRUMENTATION_INTEG_TEST_ARN}} aws-region: us-west-2 @@ -151,7 +152,7 @@ jobs: role-duration-seconds: 14400 - name: Checkout Testing Framework repository - uses: actions/checkout@v4 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: repository: ${{ env.TESTING_FRAMEWORK_REPO }} path: testing-framework diff --git a/.github/workflows/main-build.yml b/.github/workflows/main-build.yml index 30c74f3c87..622cba16e3 100644 --- a/.github/workflows/main-build.yml +++ b/.github/workflows/main-build.yml @@ -4,6 +4,7 @@ on: branches: - main - "release/v*" + workflow_dispatch: # be able to run the workflow on demand env: AWS_DEFAULT_REGION: us-east-1 STAGING_ECR_REGISTRY: 611364707713.dkr.ecr.us-west-2.amazonaws.com @@ -22,24 +23,24 @@ jobs: name: Test patches applied to dependencies runs-on: aws-otel-java-instrumentation_ubuntu-latest_32-core steps: - - uses: actions/checkout@v4 - - uses: actions/setup-java@v4 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: - java-version: 17 + java-version-file: .java-version distribution: temurin # vaadin 14 tests fail with node 18 - name: Set up Node - uses: actions/setup-node@v4 + uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 #v5.0.0 with: node-version: 16 # vaadin tests use pnpm - name: Cache pnpm modules - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: ~/.pnpm-store key: ${{ runner.os }}-test-cache-pnpm-modules - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 - uses: ./.github/actions/patch-dependencies with: run_tests: "true" @@ -54,17 +55,17 @@ jobs: staging_registry: ${{ steps.imageOutput.outputs.stagingRegistry }} staging_repository: ${{ steps.imageOutput.outputs.stagingRepository }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: - java-version: 17 + java-version-file: .java-version distribution: temurin # cache local patch outputs - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -76,21 +77,21 @@ jobs: gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} gpg_password: ${{ secrets.GPG_PASSPHRASE }} - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: 
aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws - name: Build snapshot with Gradle - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 with: arguments: build integrationTests snapshot --stacktrace -PenableCoverage=true -PlocalDocker=true env: @@ -128,7 +129,7 @@ jobs: snapshot-ecr-role: ${{ secrets.JAVA_INSTRUMENTATION_SNAPSHOT_ECR }} - name: Upload to GitHub Actions - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2 with: name: aws-opentelemetry-agent.jar path: otelagent/build/libs/aws-opentelemetry-agent-*.jar @@ -189,30 +190,30 @@ jobs: runs-on: ubuntu-latest needs: build steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: java-version: 23 distribution: 'temurin' - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws # cache local patch outputs - name: Cache local Maven repository id: cache-local-maven-repo - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -222,31 +223,31 @@ jobs: run: docker pull public.ecr.aws/docker/library/amazoncorretto:23-alpine - name: Build snapshot with Gradle - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 with: arguments: contractTests -PlocalDocker=true application-signals-lambda-layer-build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: - java-version: 17 + java-version-file: .java-version distribution: 'temurin' - name: Build Application Signals Lambda Layer working-directory: lambda-layer run: | ./build-layer.sh - name: Upload layer zip to GitHub Actions - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2 with: name: aws-opentelemetry-java-layer.zip path: lambda-layer/build/distributions/aws-opentelemetry-java-layer.zip - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: arn:aws:iam::${{ secrets.APPLICATION_SIGNALS_E2E_TEST_ACCOUNT_ID }}:role/${{ secrets.APPLICATION_SIGNALS_E2E_TEST_ROLE_NAME }} aws-region: 
us-east-1 @@ -256,6 +257,7 @@ jobs: aws s3 cp ./build/distributions/aws-opentelemetry-java-layer.zip s3://adot-main-build-staging-jar/adot-java-lambda-layer-${{ github.run_id }}.zip application-signals-e2e-test: + name: "Application Signals E2E Test" needs: [build, application-signals-lambda-layer-build] uses: ./.github/workflows/application-signals-e2e-test.yml secrets: inherit @@ -269,7 +271,7 @@ jobs: if: always() steps: - name: Configure AWS Credentials for emitting metrics - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.METRICS_ROLE_ARN }} aws-region: us-east-1 diff --git a/.github/workflows/nightly-upstream-snapshot-build.yml b/.github/workflows/nightly-upstream-snapshot-build.yml index c97db6704c..1c845748f1 100644 --- a/.github/workflows/nightly-upstream-snapshot-build.yml +++ b/.github/workflows/nightly-upstream-snapshot-build.yml @@ -23,18 +23,18 @@ jobs: image_name: ${{ steps.imageOutput.outputs.imageName }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: fetch-depth: 0 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: - java-version: 17 + java-version-file: .java-version distribution: 'temurin' # cache local patch outputs - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -46,21 +46,21 @@ jobs: gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} gpg_password: ${{ secrets.GPG_PASSPHRASE }} - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws - name: Build snapshot with Gradle - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 with: arguments: build --stacktrace -PenableCoverage=true -PtestUpstreamSnapshots=true env: @@ -95,7 +95,7 @@ jobs: snapshot-ecr-role: ${{ secrets.JAVA_INSTRUMENTATION_SNAPSHOT_ECR }} - name: Upload to GitHub Actions - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2 with: name: aws-opentelemetry-agent.jar path: otelagent/build/libs/aws-opentelemetry-agent-*.jar @@ -129,23 +129,23 @@ jobs: runs-on: ubuntu-latest needs: build steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: fetch-depth: 0 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version: 23 distribution: 'temurin' - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: 
aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws @@ -153,7 +153,7 @@ jobs: run: docker pull public.ecr.aws/docker/library/amazoncorretto:23-alpine - name: Build snapshot with Gradle - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 with: arguments: contractTests -PtestUpstreamSnapshots=true -PlocalDocker=true diff --git a/.github/workflows/patch-release-build.yml b/.github/workflows/patch-release-build.yml index 4cbc3965fa..47a5b0c8e4 100644 --- a/.github/workflows/patch-release-build.yml +++ b/.github/workflows/patch-release-build.yml @@ -37,14 +37,14 @@ jobs: name: Check out release branch # Will fail if there is no release branch yet or succeed otherwise continue-on-error: true - uses: actions/checkout@v4 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: ref: ${{ steps.parse-release-branch.outputs.release-branch-name }} - id: checkout-release-tag name: Check out release tag # If there is already a release branch, the previous step succeeds and we don't run this or the next one. if: ${{ steps.checkout-release-branch.outcome == 'failure' }} - uses: actions/checkout@v4 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: ref: ${{ steps.parse-release-branch.outputs.release-tag-name }} - name: Create release branch @@ -57,21 +57,21 @@ jobs: needs: prepare-release-branch steps: - name: Checkout release branch - uses: actions/checkout@v4 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: ref: ${{ needs.prepare-release-branch.outputs.release-branch-name }} - - uses: actions/setup-java@v4 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: - java-version: 17 + java-version-file: .java-version distribution: 'temurin' - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws @@ -88,33 +88,32 @@ jobs: # Trim whitespaces and cherrypick echo $word | sed 's/ *$//g' | sed 's/^ *//g' | git cherry-pick --stdin done - - name: Build release with Gradle - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 with: arguments: build integrationTests -PlocalDocker=true -Prelease.version=${{ github.event.inputs.version }} --stacktrace - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN_RELEASE }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws - name: Set up QEMU - uses: docker/setup-qemu-action@v3 + uses: 
docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 #3.6.0 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 #v3.11.1 with: driver-opts: image=moby/buildkit:v0.15.1 - name: Build image for testing - uses: docker/build-push-action@v5 + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 #v6.18.0 with: push: false build-args: "ADOT_JAVA_VERSION=${{ github.event.inputs.version }}" @@ -128,7 +127,7 @@ jobs: run: .github/scripts/test-adot-javaagent-image.sh "${{ env.TEST_TAG }}" "${{ github.event.inputs.version }}" - name: Build and push image - uses: docker/build-push-action@v5 + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 #v6.18.0 with: push: true build-args: "ADOT_JAVA_VERSION=${{ github.event.inputs.version }}" @@ -138,7 +137,7 @@ jobs: public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v${{ github.event.inputs.version }} - name: Build and Publish release with Gradle - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 with: arguments: build final closeAndReleaseSonatypeStagingRepository -Prelease.version=${{ github.event.inputs.version }} --stacktrace env: diff --git a/.github/workflows/post-release-version-bump.yml b/.github/workflows/post-release-version-bump.yml new file mode 100644 index 0000000000..b559262338 --- /dev/null +++ b/.github/workflows/post-release-version-bump.yml @@ -0,0 +1,146 @@ +name: Post Release - Prepare Main for Next Development Cycle + +on: + workflow_dispatch: + inputs: + version: + description: 'Version number (e.g., 1.0.1)' + required: true + is_patch: + description: 'Is this a patch? (true or false)' + required: true + default: 'false' + +env: + AWS_DEFAULT_REGION: us-east-1 + +permissions: + id-token: write + contents: write + pull-requests: write + +jobs: + check-version: + runs-on: ubuntu-latest + steps: + - name: Checkout main + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + with: + ref: main + fetch-depth: 0 + + - name: Extract Major.Minor Version and setup Env variable + run: | + echo "VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV + echo "MAJOR_MINOR=$(echo ${{ github.event.inputs.version }} | sed -E 's/([0-9]+\.[0-9]+)\.[0-9]+/\1/')" >> $GITHUB_ENV + + - name: Get current major.minor version from main branch + id: get_version + run: | + CURRENT_VERSION=$(grep '__version__' aws-opentelemetry-distro/src/amazon/opentelemetry/distro/version.py | sed -E 's/__version__ = "([0-9]+\.[0-9]+)\.[0-9]+.*"/\1/') + echo "CURRENT_MAJOR_MINOR_VERSION=$CURRENT_VERSION" >> $GITHUB_ENV + + - name: Set major and minor for current version + run: | + echo "CURRENT_MAJOR=$(echo $CURRENT_MAJOR_MINOR_VERSION | cut -d. -f1)" >> $GITHUB_ENV + echo "CURRENT_MINOR=$(echo $CURRENT_MAJOR_MINOR_VERSION | cut -d. -f2)" >> $GITHUB_ENV + + - name: Set major and minor for input version + run: | + echo "INPUT_MAJOR=$(echo $MAJOR_MINOR | cut -d. -f1)" >> $GITHUB_ENV + echo "INPUT_MINOR=$(echo $MAJOR_MINOR | cut -d. 
-f2)" >> $GITHUB_ENV + + - name: Compare major.minor version and skip if behind + run: | + if [ "$CURRENT_MAJOR" -gt "$INPUT_MAJOR" ] || { [ "$CURRENT_MAJOR" -eq "$INPUT_MAJOR" ] && [ "$CURRENT_MINOR" -gt "$INPUT_MINOR" ]; }; then + echo "Input version is behind main's current major.minor version, don't need to update major version" + exit 1 + fi + + + prepare-main: + runs-on: ubuntu-latest + needs: check-version + steps: + - name: Configure AWS credentials for BOT secrets + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 + with: + role-to-assume: ${{ secrets.AWS_ROLE_ARN_SECRETS_MANAGER }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} + + - name: Get Bot secrets + uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802 #v2.0.10 + id: bot_secrets + with: + secret-ids: | + BOT_TOKEN ,${{ secrets.BOT_TOKEN_SECRET_ARN }} + parse-json-secrets: true + + - name: Setup Git + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + with: + fetch-depth: 0 + token: ${{ env.BOT_TOKEN_GITHUB_RW_PATOKEN }} + + - name: Configure Git + run: | + git config user.name "github-actions" + git config user.email "github-actions@github.com" + + - name: Extract Major.Minor Version and setup Env variable + run: | + echo "VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV + echo "MAJOR_MINOR=$(echo ${{ github.event.inputs.version }} | sed -E 's/([0-9]+\.[0-9]+)\.[0-9]+/\1/')" >> $GITHUB_ENV + + - name: Determine release branch and checkout + run: | + RELEASE_BRANCH="release/v${MAJOR_MINOR}.x" + git fetch origin $RELEASE_BRANCH + git checkout -b "prepare-main-for-next-dev-cycle-${VERSION}" origin/$RELEASE_BRANCH + + - name: Update version to next development version in main + run: | + DEV_VERSION="${{ github.event.inputs.version }}-SNAPSHOT" + sed -i'' -e "s/val adotVersion = \".*\"/val adotVersion = \"${DEV_VERSION}\"/" version.gradle.kts + VERSION="${{ github.event.inputs.version }}" + sed -i'' -e 's/adot-autoinstrumentation-java:v2.*"/adot-autoinstrumentation-java:v'$VERSION'"/' .github/workflows/daily-scan.yml + + # for patch releases, avoid merge conflict by manually resolving CHANGELOG with main + if [[ "${{ github.event.inputs.is_patch }}" == "true" ]]; then + # Copy the patch release entries + sed -n "/^## v${VERSION}/,/^## v[0-9]/p" CHANGELOG.md | sed '$d' > /tmp/patch_release_section.txt + git fetch origin main + git show origin/main:CHANGELOG.md > CHANGELOG.md + # Insert the patch release entries after Unreleased + awk -i inplace '/^## v[0-9]/ && !inserted { system("cat /tmp/patch_release_section.txt"); inserted=1 } {print}' CHANGELOG.md + fi + + git add version.gradle.kts + git add .github/workflows/daily-scan.yml + git add CHANGELOG.md + git commit -m "Prepare main for next development cycle: Update version to $DEV_VERSION" + git push --set-upstream origin "prepare-main-for-next-dev-cycle-${VERSION}" + + - name: Create Pull Request to main + env: + GITHUB_TOKEN: ${{ env.BOT_TOKEN_GITHUB_RW_PATOKEN }} + run: | + DEV_VERSION="${{ github.event.inputs.version }}-SNAPSHOT" + gh pr create --title "Post release $VERSION: Update version to $DEV_VERSION" \ + --body "This PR prepares the main branch for the next development cycle by updating the version to $DEV_VERSION and updating the image version to be scanned to the latest released. + + This PR should only be merge when release for version v$VERSION is success. 
+ + By submitting this pull request, I confirm that you can use, modify, copy, and redistribute this contribution, under the terms of your choice." \ + --head prepare-main-for-next-dev-cycle-${VERSION} \ + --base main + + - name: Force our CHANGELOG to override merge conflicts + run: | + git merge origin/main || true + git checkout --ours CHANGELOG.md + git add CHANGELOG.md + if ! git diff --quiet --cached; then + git commit -m "Force our CHANGELOG to override merge conflicts" + git push origin "prepare-main-for-next-dev-cycle-${VERSION}" + fi \ No newline at end of file diff --git a/.github/workflows/pr-build.yml b/.github/workflows/pr-build.yml index fd2d7cf2ae..2c390d9ba2 100644 --- a/.github/workflows/pr-build.yml +++ b/.github/workflows/pr-build.yml @@ -1,6 +1,12 @@ name: PR Build on: pull_request: + types: + - opened + - reopened + - synchronize + - labeled + - unlabeled branches: - main - "release/v*" @@ -8,30 +14,85 @@ env: TEST_TAG: public.ecr.aws/aws-observability/adot-autoinstrumentation-java:test-v2 jobs: + static-code-checks: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + fetch-depth: 0 + + - name: Check CHANGELOG + if: always() + run: | + # Check if PR is from workflows bot or dependabot + if [[ "${{ github.event.pull_request.user.login }}" == "aws-application-signals-bot" ]]; then + echo "Skipping check: PR from aws-application-signals-bot" + exit 0 + fi + + if [[ "${{ github.event.pull_request.user.login }}" == "dependabot[bot]" ]]; then + echo "Skipping check: PR from dependabot" + exit 0 + fi + + # Check for skip changelog label + if echo '${{ toJSON(github.event.pull_request.labels.*.name) }}' | jq -r '.[]' | grep -q "skip changelog"; then + echo "Skipping check: skip changelog label found" + exit 0 + fi + + # Fetch base branch and check for CHANGELOG modifications + git fetch origin ${{ github.base_ref }} + if git diff --name-only origin/${{ github.base_ref }}..HEAD | grep -q "CHANGELOG.md"; then + echo "CHANGELOG.md entry found - check passed" + exit 0 + fi + + echo "It looks like you didn't add an entry to CHANGELOG.md. If this change affects the SDK behavior, please update CHANGELOG.md and link this PR in your entry. If this PR does not need a CHANGELOG entry, you can add the 'Skip Changelog' label to this PR." + exit 1 + + - name: Check for versioned GitHub actions + if: always() + run: | + # Get changed GitHub workflow/action files + CHANGED_FILES=$(git diff --name-only origin/${{ github.base_ref }}..HEAD | grep -E "^\.github/(workflows|actions)/.*\.ya?ml$" || true) + + if [ -n "$CHANGED_FILES" ]; then + # Check for any versioned actions, excluding comments and this validation script + VIOLATIONS=$(grep -Hn "uses:.*@v" $CHANGED_FILES | grep -v "grep.*uses:.*@v" | grep -v "#.*@v" || true) + if [ -n "$VIOLATIONS" ]; then + echo "Found versioned GitHub actions. 
Use commit SHAs instead:" + echo "$VIOLATIONS" + exit 1 + fi + fi + + echo "No versioned actions found in changed files" + testpatch: name: Test patches applied to dependencies runs-on: aws-otel-java-instrumentation_ubuntu-latest_32-core steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: - java-version: 17 + java-version-file: .java-version distribution: temurin # vaadin 14 tests fail with node 18 - name: Set up Node - uses: actions/setup-node@v4 + uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 with: node-version: 16 # vaadin tests use pnpm - name: Cache pnpm modules - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 with: path: ~/.pnpm-store key: ${{ runner.os }}-test-cache-pnpm-modules - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - uses: ./.github/actions/patch-dependencies with: @@ -54,14 +115,14 @@ jobs: # https://github.com/open-telemetry/opentelemetry-java/issues/4560 - os: ${{ startsWith(github.event.pull_request.base.ref, 'release/v') && 'windows-latest' || '' }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: - java-version: 17 + java-version-file: .java-version distribution: temurin - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 # Cleanup directories before proceeding with setup - name: Clean up old installations @@ -72,7 +133,7 @@ jobs: # cache local patch outputs - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -83,7 +144,7 @@ jobs: if: ${{ matrix.os != 'windows-latest' }} # Skip patch on windows as it is not possible to build opentelemetry-java on windows - name: Build with Gradle with Integration tests - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 if: ${{ matrix.os == 'ubuntu-latest' }} with: arguments: build integrationTests --stacktrace -PenableCoverage=true -PlocalDocker=true @@ -93,7 +154,7 @@ jobs: ./gradlew build -p exporters/aws-distro-opentelemetry-xray-udp-span-exporter - name: Set up Java version for tests - uses: actions/setup-java@v4 + uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version: 23 distribution: temurin @@ -103,15 +164,15 @@ jobs: run: docker pull public.ecr.aws/docker/library/amazoncorretto:23-alpine - name: Run contract tests - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 if: ${{ matrix.os == 'ubuntu-latest' }} with: arguments: contractTests -PlocalDocker=true -i - name: Set up Java version for image build - uses: actions/setup-java@v4 + uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: - java-version: 17 + java-version-file: .java-version distribution: temurin - name: Get current version @@ -121,17 +182,17 @@ jobs: echo "ADOT_JAVA_VERSION=$(./gradlew printVersion -q )" >> $GITHUB_ENV - name: 
Set up QEMU - uses: docker/setup-qemu-action@v3 + uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 #3.6.0 if: ${{ matrix.os == 'ubuntu-latest' }} - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 #v3.11.1 with: driver-opts: image=moby/buildkit:v0.15.1 if: ${{ matrix.os == 'ubuntu-latest' }} - name: Build image for testing - uses: docker/build-push-action@v5 + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 #v6.18.0 if: ${{ matrix.os == 'ubuntu-latest' }} with: push: false @@ -147,6 +208,7 @@ jobs: with: image-ref: ${{ env.TEST_TAG }} severity: 'CRITICAL,HIGH,MEDIUM,LOW,UNKNOWN' + logout: 'true' - name: Test docker image if: ${{ matrix.os == 'ubuntu-latest' }} @@ -154,25 +216,59 @@ jobs: run: .github/scripts/test-adot-javaagent-image.sh "${{ env.TEST_TAG }}" "${{ env.ADOT_JAVA_VERSION }}" - name: Build with Gradle - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 if: ${{ matrix.os != 'ubuntu-latest' && (hashFiles('.github/patches/opentelemetry-java*.patch') == '' || matrix.os != 'windows-latest' ) }} # build on windows as well unless a patch exists with: arguments: build --stacktrace -PenableCoverage=true - - uses: codecov/codecov-action@v3 + - uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 #v5.5.1 build-lambda: runs-on: ubuntu-latest steps: - name: Checkout Repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 - name: Setup Java - uses: actions/setup-java@v4 + uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: - java-version: 17 + java-version-file: .java-version distribution: temurin - name: Build layer working-directory: lambda-layer run: ./build-layer.sh + all-pr-checks-pass: + runs-on: ubuntu-latest + needs: [static-code-checks, testpatch, build, build-lambda] + if: always() + steps: + - name: Checkout to get workflow file + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0 + + - name: Check all jobs succeeded and none missing + run: | + # Check if all needed jobs succeeded + results='${{ toJSON(needs) }}' + if echo "$results" | jq -r '.[] | .result' | grep -v success; then + echo "Some jobs failed" + exit 1 + fi + + # Extract all job names from workflow (excluding this gate job) + all_jobs=$(yq eval '.jobs | keys | .[]' .github/workflows/pr-build.yml | grep -v "all-pr-checks-pass" | sort) + + # Extract job names from needs array + needed_jobs='${{ toJSON(needs) }}' + needs_list=$(echo "$needed_jobs" | jq -r 'keys[]' | sort) + + # Check if any jobs are missing from needs + missing_jobs=$(comm -23 <(echo "$all_jobs") <(echo "$needs_list")) + if [ -n "$missing_jobs" ]; then + echo "ERROR: Jobs missing from needs array in all-pr-checks-pass:" + echo "$missing_jobs" + echo "Please add these jobs to the needs array of all-pr-checks-pass" + exit 1 + fi + + echo "All checks passed and no jobs missing from gate!" 
diff --git a/.github/workflows/pre-release-prepare.yml b/.github/workflows/pre-release-prepare.yml new file mode 100644 index 0000000000..4ef8a0ed0a --- /dev/null +++ b/.github/workflows/pre-release-prepare.yml @@ -0,0 +1,114 @@ +name: Pre Release Prepare - Update Version and Create PR + +on: + workflow_dispatch: + inputs: + version: + description: 'Version number (e.g., 1.0.1)' + required: true + is_patch: + description: 'Is this a patch? (true or false)' + required: true + default: 'false' + +env: + AWS_DEFAULT_REGION: us-east-1 + +permissions: + contents: write + pull-requests: write + id-token: write + + +jobs: + update-version-and-create-pr: + runs-on: ubuntu-latest + steps: + - name: Configure AWS credentials for BOT secrets + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 + with: + role-to-assume: ${{ secrets.AWS_ROLE_ARN_SECRETS_MANAGER }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} + + - name: Get Bot secrets + uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802 #v2.0.10 + id: bot_secrets + with: + secret-ids: | + BOT_TOKEN ,${{ secrets.BOT_TOKEN_SECRET_ARN }} + parse-json-secrets: true + + - name: Checkout main branch + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + with: + ref: 'main' + token: ${{ env.BOT_TOKEN_GITHUB_RW_PATOKEN }} + + - name: Setup Git + run: | + git config user.name "github-actions" + git config user.email "github-actions@github.com" + + - name: Extract Major.Minor Version and setup Env variable + run: | + echo "VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV + echo "MAJOR_MINOR=$(echo ${{ github.event.inputs.version }} | sed -E 's/([0-9]+\.[0-9]+)\.[0-9]+/\1/')" >> $GITHUB_ENV + + - name: Create branches + run: | + IS_PATCH=${{ github.event.inputs.is_patch }} + if [[ "$IS_PATCH" != "true" && "$IS_PATCH" != "false" ]]; then + echo "Invalid input for IS_PATCH. Must be 'true' or 'false'." + exit 1 + fi + + + if git ls-remote --heads origin release/v${MAJOR_MINOR}.x | grep -q "release/v${MAJOR_MINOR}.x"; then + if [ "$IS_PATCH" = "true" ]; then + git fetch origin release/v${MAJOR_MINOR}.x + echo "Branch release/v${MAJOR_MINOR}.x already exists, checking out." + git checkout "release/v${MAJOR_MINOR}.x" + else + echo "Error, release series branch release/v${MAJOR_MINOR}.x exist for non-patch release" + echo "Check your input or branch" + exit 1 + fi + else + if [ "$IS_PATCH" = "true" ]; then + echo "Error, release series branch release/v${MAJOR_MINOR}.x NOT exist for patch release" + echo "Check your input or branch" + exit 1 + else + echo "Creating branch release/v${MAJOR_MINOR}.x." 
+ git checkout -b "release/v${MAJOR_MINOR}.x" + git push origin "release/v${MAJOR_MINOR}.x" + fi + fi + + git checkout -b "v${VERSION}_release" + git push origin "v${VERSION}_release" + + - name: Update version in file + run: | + sed -i'' -e "s/val adotVersion = \".*\"/val adotVersion = \"${VERSION}\"/" version.gradle.kts + git commit -am "Update version to ${VERSION}" + git push origin "v${VERSION}_release" + + - name: Update CHANGELOG for release + if: github.event.inputs.is_patch != 'true' + run: | + sed -i "s/## Unreleased/## Unreleased\n\n## v${VERSION} - $(date +%Y-%m-%d)/" CHANGELOG.md + git add CHANGELOG.md + git commit -m "Update CHANGELOG for version ${VERSION}" + git push origin "v${VERSION}_release" + + - name: Create pull request against the release branch + env: + GITHUB_TOKEN: ${{ env.BOT_TOKEN_GITHUB_RW_PATOKEN }} + run: | + gh pr create --title "Pre-release: Update version to ${VERSION}" \ + --body "This PR updates the version to ${VERSION}. + + By submitting this pull request, I confirm that you can use, modify, copy, and redistribute this contribution, under the terms of your choice." \ + --head v${{ github.event.inputs.version }}_release \ + --base release/v${MAJOR_MINOR}.x \ No newline at end of file diff --git a/.github/workflows/publish-status.yml b/.github/workflows/publish-status.yml index 1efeb9c04d..5159e2bddf 100644 --- a/.github/workflows/publish-status.yml +++ b/.github/workflows/publish-status.yml @@ -37,7 +37,7 @@ jobs: contents: read steps: - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.roleArn }} aws-region: ${{ inputs.region }} diff --git a/.github/workflows/release-build.yml b/.github/workflows/release-build.yml index ce9d29ddc8..e8d22a7364 100644 --- a/.github/workflows/release-build.yml +++ b/.github/workflows/release-build.yml @@ -5,6 +5,10 @@ on: version: description: The version to tag the release with, e.g., 1.2.0, 1.2.1-alpha.1 required: true + aws_region: + description: 'Deploy lambda layer to aws regions' + required: true + default: 'us-east-1, us-east-2, us-west-1, us-west-2, ap-south-1, ap-northeast-3, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-northeast-1, ca-central-1, eu-central-1, eu-west-1, eu-west-2, eu-west-3, eu-north-1, sa-east-1, af-south-1, ap-east-1, ap-south-2, ap-southeast-3, ap-southeast-4, eu-central-2, eu-south-1, eu-south-2, il-central-1, me-central-1, me-south-1, ap-southeast-5, ap-southeast-7, mx-central-1, ca-west-1, cn-north-1, cn-northwest-1' env: AWS_PUBLIC_ECR_REGION: us-east-1 @@ -13,23 +17,44 @@ env: PUBLIC_REPOSITORY: public.ecr.aws/aws-observability/adot-autoinstrumentation-java PRIVATE_REPOSITORY: 020628701572.dkr.ecr.us-west-2.amazonaws.com/adot-autoinstrumentation-java PRIVATE_REGISTRY: 020628701572.dkr.ecr.us-west-2.amazonaws.com - ARTIFACT_NAME: aws-opentelemetry-agent.jar + ARTIFACT_NAME: aws-opentelemetry-agent.jar + # Legacy list of commercial regions to deploy to. New regions should NOT be added here, and instead should be added to the `aws_region` default input to the workflow. 
+ LEGACY_COMMERCIAL_REGIONS: us-east-1, us-east-2, us-west-1, us-west-2, ap-south-1, ap-northeast-3, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-northeast-1, ca-central-1, eu-central-1, eu-west-1, eu-west-2, eu-west-3, eu-north-1, sa-east-1 + LAYER_NAME: AWSOpenTelemetryDistroJava permissions: id-token: write contents: write jobs: - build: + build-sdk: environment: Release runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - uses: actions/setup-java@v4 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + + - name: Check main build status + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + WORKFLOW_ID=$(gh api repos/${{ github.repository }}/actions/workflows --jq '.workflows[] | select(.name=="Java Agent Main Build") | .id') + LATEST_RUN=$(gh api repos/${{ github.repository }}/actions/workflows/$WORKFLOW_ID/runs --jq '[.workflow_runs[] | select(.head_branch=="${{ github.ref_name }}")] | sort_by(.created_at) | .[-1] | {conclusion, status}') + STATUS=$(echo "$LATEST_RUN" | jq -r '.status') + CONCLUSION=$(echo "$LATEST_RUN" | jq -r '.conclusion') + + if [ "$STATUS" = "in_progress" ] || [ "$STATUS" = "queued" ]; then + echo "Main build is still running (status: $STATUS). Cannot proceed with release." + exit 1 + elif [ "$CONCLUSION" != "success" ]; then + echo "Latest main build on branch ${{ github.ref_name }} conclusion: $CONCLUSION" + exit 1 + fi + echo "Main build succeeded, proceeding with release" + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: - java-version: 17 + java-version-file: .java-version distribution: 'temurin' - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Publish patched dependencies to maven local uses: ./.github/actions/patch-dependencies @@ -38,53 +63,134 @@ jobs: gpg_password: ${{ secrets.GPG_PASSPHRASE }} - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_PUBLIC_ECR_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws - name: Build release with Gradle - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 with: arguments: build integrationTests -PlocalDocker=true -Prelease.version=${{ github.event.inputs.version }} --stacktrace + - name: Upload SDK artifact + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2 + with: + name: ${{ env.ARTIFACT_NAME }} + path: otelagent/build/libs/aws-opentelemetry-agent-${{ github.event.inputs.version }}.jar + + build-layer: + needs: build-sdk + runs-on: ubuntu-latest + outputs: + aws_regions_json: ${{ steps.set-matrix.outputs.aws_regions_json }} + steps: + - name: Set up regions matrix + id: set-matrix + env: + AWS_REGIONS: ${{ github.event.inputs.aws_region }} + run: | + IFS=',' read -ra REGIONS <<< "$AWS_REGIONS" + MATRIX="[" + for region in "${REGIONS[@]}"; do + trimmed_region=$(echo "$region" | xargs) + MATRIX+="\"$trimmed_region\"," + done + MATRIX="${MATRIX%,}]" + echo ${MATRIX} + echo "aws_regions_json=${MATRIX}" >> $GITHUB_OUTPUT + + - name: Checkout Repo @ SHA - ${{ github.sha }} + uses: 
actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 + with: + java-version-file: .java-version + distribution: 'temurin' + + - name: Build layers + working-directory: lambda-layer + run: | + ./build-layer.sh + + - name: Upload layer + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2 + with: + name: layer.zip + path: lambda-layer/build/distributions/aws-opentelemetry-java-layer.zip + + publish-sdk: + runs-on: ubuntu-latest + needs: [build-sdk, build-layer] + steps: + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 + with: + java-version-file: .java-version + distribution: 'temurin' + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 + + - name: Publish patched dependencies to maven local + uses: ./.github/actions/patch-dependencies + with: + gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} + gpg_password: ${{ secrets.GPG_PASSPHRASE }} + - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 + with: + role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} + aws-region: ${{ env.AWS_PUBLIC_ECR_REGION }} + + - name: Log in to AWS ECR + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 + with: + registry: public.ecr.aws + + # build the artifact again so that its in the output path expected for building the docker image. + - name: Build release with Gradle + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa #v2 + with: + arguments: build integrationTests -PlocalDocker=true -Prelease.version=${{ github.event.inputs.version }} --stacktrace + + - name: Configure AWS Credentials for public ECR + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN_RELEASE }} aws-region: ${{ env.AWS_PUBLIC_ECR_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws - name: Configure AWS Credentials for Private ECR - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN_RELEASE }} aws-region: ${{ env.AWS_PRIVATE_ECR_REGION }} - name: Log in to AWS private ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: ${{ env.PRIVATE_REGISTRY }} - name: Set up QEMU - uses: docker/setup-qemu-action@v3 + uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 #3.6.0 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 #v3.11.1 with: driver-opts: image=moby/buildkit:v0.15.1 - name: Build image for testing - uses: docker/build-push-action@v5 + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 #v6.18.0 with: push: false build-args: "ADOT_JAVA_VERSION=${{ github.event.inputs.version }}" @@ -94,11 +200,13 @@ jobs: load: true - name: Test docker image + env: + VERSION: ${{ github.event.inputs.version }} shell: bash - run: 
.github/scripts/test-adot-javaagent-image.sh "${{ env.TEST_TAG }}" "${{ github.event.inputs.version }}" + run: .github/scripts/test-adot-javaagent-image.sh "${{ env.TEST_TAG }}" "$VERSION" - name: Build and push image - uses: docker/build-push-action@v5 + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 #v6.18.0 with: push: true build-args: "ADOT_JAVA_VERSION=${{ github.event.inputs.version }}" @@ -109,7 +217,7 @@ jobs: ${{ env.PRIVATE_REPOSITORY }}:v${{ github.event.inputs.version }} - name: Build and Publish release with Gradle - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 with: arguments: build final closeAndReleaseSonatypeStagingRepository -Prelease.version=${{ github.event.inputs.version }} --stacktrace env: @@ -119,28 +227,246 @@ jobs: GPG_PRIVATE_KEY: ${{ secrets.GPG_PRIVATE_KEY }} GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }} + publish-layer-prod: + runs-on: ubuntu-latest + needs: [build-layer, publish-sdk] + strategy: + matrix: + aws_region: ${{ fromJson(needs.build-layer.outputs.aws_regions_json) }} + steps: + - name: role arn + env: + LEGACY_COMMERCIAL_REGIONS: ${{ env.LEGACY_COMMERCIAL_REGIONS }} + run: | + LEGACY_COMMERCIAL_REGIONS_ARRAY=(${LEGACY_COMMERCIAL_REGIONS//,/ }) + FOUND=false + for REGION in "${LEGACY_COMMERCIAL_REGIONS_ARRAY[@]}"; do + if [[ "$REGION" == "${{ matrix.aws_region }}" ]]; then + FOUND=true + break + fi + done + if [ "$FOUND" = true ]; then + echo "Found ${{ matrix.aws_region }} in LEGACY_COMMERCIAL_REGIONS" + SECRET_KEY="LAMBDA_LAYER_RELEASE" + else + echo "Not found ${{ matrix.aws_region }} in LEGACY_COMMERCIAL_REGIONS" + SECRET_KEY="${{ matrix.aws_region }}_LAMBDA_LAYER_RELEASE" + fi + SECRET_KEY=${SECRET_KEY//-/_} + echo "SECRET_KEY=${SECRET_KEY}" >> $GITHUB_ENV - - name: Get SHA256 checksum of release artifact - id: get_sha256 + - uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 + with: + role-to-assume: ${{ secrets[env.SECRET_KEY] }} + role-duration-seconds: 1200 + aws-region: ${{ matrix.aws_region }} + + - name: Get s3 bucket name for release run: | - cp "otelagent/build/libs/aws-opentelemetry-agent-${{ github.event.inputs.version }}.jar" ${{ env.ARTIFACT_NAME }} - shasum -a 256 ${{ env.ARTIFACT_NAME }} > ${{ env.ARTIFACT_NAME }}.sha256 + echo BUCKET_NAME=java-lambda-layer-${{ github.run_id }}-${{ matrix.aws_region }} | tee --append $GITHUB_ENV + + - name: download layer.zip + uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 #v5.0.0 + with: + name: layer.zip + + - name: publish + run: | + aws s3 mb s3://${{ env.BUCKET_NAME }} + aws s3 cp aws-opentelemetry-java-layer.zip s3://${{ env.BUCKET_NAME }} + layerARN=$( + aws lambda publish-layer-version \ + --layer-name ${{ env.LAYER_NAME }} \ + --content S3Bucket=${{ env.BUCKET_NAME }},S3Key=aws-opentelemetry-java-layer.zip \ + --compatible-runtimes java11 java17 java21 \ + --compatible-architectures "arm64" "x86_64" \ + --license-info "Apache-2.0" \ + --description "AWS Distro of OpenTelemetry Lambda Layer for Java Runtime" \ + --query 'LayerVersionArn' \ + --output text + ) + echo $layerARN + echo "LAYER_ARN=${layerARN}" >> $GITHUB_ENV + mkdir ${{ env.LAYER_NAME }} + echo $layerARN > ${{ env.LAYER_NAME }}/${{ matrix.aws_region }} + cat ${{ env.LAYER_NAME }}/${{ matrix.aws_region }} + + - name: public layer + run: | + layerVersion=$( + aws lambda list-layer-versions \ + --layer-name ${{ env.LAYER_NAME }} \ + --query 
'max_by(LayerVersions, &Version).Version'
+ )
+ aws lambda add-layer-version-permission \
+ --layer-name ${{ env.LAYER_NAME }} \
+ --version-number $layerVersion \
+ --principal "*" \
+ --statement-id publish \
+ --action lambda:GetLayerVersion
+
+ - name: upload layer arn artifact
+ if: ${{ success() }}
+ uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2
+ with:
+ name: ${{ env.LAYER_NAME }}-${{ matrix.aws_region }}
+ path: ${{ env.LAYER_NAME }}/${{ matrix.aws_region }}
- - name: Create release
+ - name: clean s3
+ if: always()
+ run: |
+ aws s3 rb --force s3://${{ env.BUCKET_NAME }}
+
+ generate-lambda-release-note:
+ runs-on: ubuntu-latest
+ needs: publish-layer-prod
+ outputs:
+ layer-note: ${{ steps.layer-note.outputs.layer-note }}
+ steps:
+ - name: Checkout Repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
+ - uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd #v3.1.2
+ - name: download layerARNs
+ uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 #v5.0.0
+ with:
+ pattern: ${{ env.LAYER_NAME }}-*
+ path: ${{ env.LAYER_NAME }}
+ merge-multiple: true
+ - name: show layerARNs
+ run: |
+ for file in ${{ env.LAYER_NAME }}/*
+ do
+ echo $file
+ cat $file
+ done
+ - name: generate layer-note
+ id: layer-note
+ working-directory: ${{ env.LAYER_NAME }}
+ run: |
+ echo "| Region | Layer ARN |" >> ../layer-note
+ echo "| ---- | ---- |" >> ../layer-note
+ for file in *
+ do
+ read arn < $file
+ echo "| " $file " | " $arn " |" >> ../layer-note
+ done
+ cd ..
+ {
+ echo "layer-note<<EOF"
+ cat layer-note
+ echo "EOF"
+ } >> $GITHUB_OUTPUT
+ cat layer-note
+ - name: generate tf layer
+ working-directory: ${{ env.LAYER_NAME }}
+ run: |
+ echo "locals {" >> ../layer_arns.tf
+ echo " sdk_layer_arns = {" >> ../layer_arns.tf
+ for file in *
+ do
+ read arn < $file
+ echo " \""$file"\" = \""$arn"\"" >> ../layer_arns.tf
+ done
+ cd ..
+ echo " }" >> layer_arns.tf + echo "}" >> layer_arns.tf + terraform fmt layer_arns.tf + cat layer_arns.tf + - name: generate layer ARN constants for CDK + working-directory: ${{ env.LAYER_NAME }} + run: | + echo "{" > ../layer_cdk + for file in *; do + read arn < "$file" + echo " \"$file\": \"$arn\"," >> ../layer_cdk + done + echo "}" >> ../layer_cdk + cat ../layer_cdk + + publish-github: + needs: generate-lambda-release-note + runs-on: ubuntu-latest + steps: + - name: Checkout Repo @ SHA - ${{ github.sha }} + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + + - name: Download SDK artifact + uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 #v5.0.0 + with: + name: ${{ env.ARTIFACT_NAME }} + + - name: Download layer.zip artifact + uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 #v5.0.0 + with: + name: layer.zip + + - name: Rename artifacts env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # This token is provided by Actions, you do not need to create your own token + VERSION: ${{ github.event.inputs.version }} run: | - # Download layer.zip from existing latest tagged SDK release note - LATEST_SDK_VERSION=$(gh release list --repo "aws-observability/aws-otel-java-instrumentation" --json tagName,isLatest -q 'map(select(.isLatest==true)) | .[0].tagName') - mkdir -p layer_artifact - gh release download "$LATEST_SDK_VERSION" --repo "aws-observability/aws-otel-java-instrumentation" --pattern "layer.zip" --dir layer_artifact - shasum -a 256 layer_artifact/layer.zip > layer_artifact/layer.zip.sha256 + cp "aws-opentelemetry-agent-$VERSION.jar" ${{ env.ARTIFACT_NAME }} + cp aws-opentelemetry-java-layer.zip layer.zip + + # Publish to GitHub releases + - name: Create GH release + id: create_release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + VERSION: ${{ github.event.inputs.version }} + run: | + # Extract versions from dependency files + OTEL_INSTRUMENTATION_VERSION=$(grep "val otelVersion" dependencyManagement/build.gradle.kts | sed 's/.*= "\([^"]*\)".*/\1/') + OTEL_CONTRIB_VERSION=$(grep "io.opentelemetry.contrib:opentelemetry-aws-xray" dependencyManagement/build.gradle.kts | sed 's/.*:\([^"]*\)".*/\1/') + + # Extract CHANGELOG entries for this version + CHANGELOG_ENTRIES=$(python3 -c " + import re, os + version = os.environ['VERSION'] + with open('CHANGELOG.md', 'r') as f: + content = f.read() + version_pattern = rf'## v{re.escape(version)}.*?\n(.*?)(?=\n## |\Z)' + version_match = re.search(version_pattern, content, re.DOTALL) + if version_match: + entries = version_match.group(1).strip() + if entries: + print(entries) + ") + + # Create release notes + cat > release_notes.md << EOF + $(if [ -n "$CHANGELOG_ENTRIES" ]; then echo "## What's Changed"; echo "$CHANGELOG_ENTRIES"; echo ""; fi) + + ## Upstream Components + + - \`OpenTelemetry Java Contrib\` - $OTEL_CONTRIB_VERSION + - \`Opentelemetry Instrumentation for Java\` - $OTEL_INSTRUMENTATION_VERSION + + ## Release Artifacts + + This release publishes to public ECR and Maven Central. + * See ADOT Java auto-instrumentation Docker image v$VERSION in our public ECR repository: + https://gallery.ecr.aws/aws-observability/adot-autoinstrumentation-java + * See version $VERSION in our Maven Central repository: + https://central.sonatype.com/artifact/software.amazon.opentelemetry/aws-opentelemetry-agent + + ## Lambda Layer + + This release includes the AWS OpenTelemetry Lambda Layer for Java version $VERSION-$(echo $GITHUB_SHA | cut -c1-7). 
+ + Lambda Layer ARNs: + ${{ needs.generate-lambda-release-note.outputs.layer-note }} + EOF + + shasum -a 256 ${{ env.ARTIFACT_NAME }} > ${{ env.ARTIFACT_NAME }}.sha256 + shasum -a 256 layer.zip > layer.zip.sha256 gh release create --target "$GITHUB_REF_NAME" \ - --title "Release v${{ github.event.inputs.version }}" \ + --title "Release v$VERSION" \ + --notes-file release_notes.md \ --draft \ - "v${{ github.event.inputs.version }}" \ + "v$VERSION" \ ${{ env.ARTIFACT_NAME }} \ ${{ env.ARTIFACT_NAME }}.sha256 \ - layer_artifact/layer.zip \ - layer_artifact/layer.zip.sha256 + layer.zip \ + layer.zip.sha256 diff --git a/.github/workflows/release-lambda.yml b/.github/workflows/release-lambda.yml deleted file mode 100644 index ede635ec91..0000000000 --- a/.github/workflows/release-lambda.yml +++ /dev/null @@ -1,255 +0,0 @@ -name: Release Java Lambda layer - -on: - workflow_dispatch: - inputs: - version: - description: The version to tag the lambda release with, e.g., 1.2.0 - required: true - aws_region: - description: 'Deploy to aws regions' - required: true - default: 'us-east-1, us-east-2, us-west-1, us-west-2, ap-south-1, ap-northeast-3, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-northeast-1, ca-central-1, eu-central-1, eu-west-1, eu-west-2, eu-west-3, eu-north-1, sa-east-1, af-south-1, ap-east-1, ap-south-2, ap-southeast-3, ap-southeast-4, eu-central-2, eu-south-1, eu-south-2, il-central-1, me-central-1, me-south-1, ap-southeast-5, ap-southeast-7, mx-central-1, ca-west-1, cn-north-1, cn-northwest-1' - -env: - # Legacy list of commercial regions to deploy to. New regions should NOT be added here, and instead should be added to the `aws_region` default input to the workflow. - LEGACY_COMMERCIAL_REGIONS: us-east-1, us-east-2, us-west-1, us-west-2, ap-south-1, ap-northeast-3, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-northeast-1, ca-central-1, eu-central-1, eu-west-1, eu-west-2, eu-west-3, eu-north-1, sa-east-1 - LAYER_NAME: AWSOpenTelemetryDistroJava - -permissions: - id-token: write - contents: write - -jobs: - build-layer: - environment: Release - runs-on: ubuntu-latest - outputs: - aws_regions_json: ${{ steps.set-matrix.outputs.aws_regions_json }} - steps: - - name: Set up regions matrix - id: set-matrix - run: | - IFS=',' read -ra REGIONS <<< "${{ github.event.inputs.aws_region }}" - MATRIX="[" - for region in "${REGIONS[@]}"; do - trimmed_region=$(echo "$region" | xargs) - MATRIX+="\"$trimmed_region\"," - done - MATRIX="${MATRIX%,}]" - echo ${MATRIX} - echo "aws_regions_json=${MATRIX}" >> $GITHUB_OUTPUT - - - name: Checkout Repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - uses: actions/setup-java@v4 - with: - java-version: 17 - distribution: 'temurin' - - - name: Build layers - working-directory: lambda-layer - run: | - ./build-layer.sh - - - name: Upload layer - uses: actions/upload-artifact@v4 - with: - name: aws-opentelemetry-java-layer.zip - path: lambda-layer/build/distributions/aws-opentelemetry-java-layer.zip - - publish-prod: - runs-on: ubuntu-latest - needs: build-layer - strategy: - matrix: - aws_region: ${{ fromJson(needs.build-layer.outputs.aws_regions_json) }} - steps: - - name: role arn - env: - LEGACY_COMMERCIAL_REGIONS: ${{ env.LEGACY_COMMERCIAL_REGIONS }} - run: | - LEGACY_COMMERCIAL_REGIONS_ARRAY=(${LEGACY_COMMERCIAL_REGIONS//,/ }) - FOUND=false - for REGION in "${LEGACY_COMMERCIAL_REGIONS_ARRAY[@]}"; do - if [[ "$REGION" == "${{ matrix.aws_region }}" ]]; then - FOUND=true - break - fi - done - if [ "$FOUND" = true ]; then - echo "Found 
${{ matrix.aws_region }} in LEGACY_COMMERCIAL_REGIONS" - SECRET_KEY="LAMBDA_LAYER_RELEASE" - else - echo "Not found ${{ matrix.aws_region }} in LEGACY_COMMERCIAL_REGIONS" - SECRET_KEY="${{ matrix.aws_region }}_LAMBDA_LAYER_RELEASE" - fi - SECRET_KEY=${SECRET_KEY//-/_} - echo "SECRET_KEY=${SECRET_KEY}" >> $GITHUB_ENV - - - uses: aws-actions/configure-aws-credentials@v4.0.2 - with: - role-to-assume: ${{ secrets[env.SECRET_KEY] }} - role-duration-seconds: 1200 - aws-region: ${{ matrix.aws_region }} - - - name: Get s3 bucket name for release - run: | - echo BUCKET_NAME=java-lambda-layer-${{ github.run_id }}-${{ matrix.aws_region }} | tee --append $GITHUB_ENV - - - name: download layer.zip - uses: actions/download-artifact@v4 - with: - name: aws-opentelemetry-java-layer.zip - - - name: publish - run: | - aws s3 mb s3://${{ env.BUCKET_NAME }} - aws s3 cp aws-opentelemetry-java-layer.zip s3://${{ env.BUCKET_NAME }} - layerARN=$( - aws lambda publish-layer-version \ - --layer-name ${{ env.LAYER_NAME }} \ - --content S3Bucket=${{ env.BUCKET_NAME }},S3Key=aws-opentelemetry-java-layer.zip \ - --compatible-runtimes java11 java17 java21 \ - --compatible-architectures "arm64" "x86_64" \ - --license-info "Apache-2.0" \ - --description "AWS Distro of OpenTelemetry Lambda Layer for Java Runtime" \ - --query 'LayerVersionArn' \ - --output text - ) - echo $layerARN - echo "LAYER_ARN=${layerARN}" >> $GITHUB_ENV - mkdir ${{ env.LAYER_NAME }} - echo $layerARN > ${{ env.LAYER_NAME }}/${{ matrix.aws_region }} - cat ${{ env.LAYER_NAME }}/${{ matrix.aws_region }} - - - name: public layer - run: | - layerVersion=$( - aws lambda list-layer-versions \ - --layer-name ${{ env.LAYER_NAME }} \ - --query 'max_by(LayerVersions, &Version).Version' - ) - aws lambda add-layer-version-permission \ - --layer-name ${{ env.LAYER_NAME }} \ - --version-number $layerVersion \ - --principal "*" \ - --statement-id publish \ - --action lambda:GetLayerVersion - - - name: upload layer arn artifact - if: ${{ success() }} - uses: actions/upload-artifact@v4 - with: - name: ${{ env.LAYER_NAME }}-${{ matrix.aws_region }} - path: ${{ env.LAYER_NAME }}/${{ matrix.aws_region }} - - - name: clean s3 - if: always() - run: | - aws s3 rb --force s3://${{ env.BUCKET_NAME }} - - generate-release-note: - runs-on: ubuntu-latest - needs: publish-prod - steps: - - name: Checkout Repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 - - - uses: hashicorp/setup-terraform@v2 - - - name: download layerARNs - uses: actions/download-artifact@v4 - with: - pattern: ${{ env.LAYER_NAME }}-* - path: ${{ env.LAYER_NAME }} - merge-multiple: true - - - name: show layerARNs - run: | - for file in ${{ env.LAYER_NAME }}/* - do - echo $file - cat $file - done - - - name: generate layer-note - working-directory: ${{ env.LAYER_NAME }} - run: | - echo "| Region | Layer ARN |" >> ../layer-note - echo "| ---- | ---- |" >> ../layer-note - for file in * - do - read arn < $file - echo "| " $file " | " $arn " |" >> ../layer-note - done - cat ../layer-note - - name: generate tf layer - working-directory: ${{ env.LAYER_NAME }} - run: | - echo "locals {" >> ../layer_arns.tf - echo " sdk_layer_arns = {" >> ../layer_arns.tf - for file in * - do - read arn < $file - echo " \""$file"\" = \""$arn"\"" >> ../layer_arns.tf - done - cd .. 
- echo " }" >> layer_arns.tf - echo "}" >> layer_arns.tf - terraform fmt layer_arns.tf - cat layer_arns.tf - - name: generate layer ARN constants for CDK - working-directory: ${{ env.LAYER_NAME }} - run: | - echo "{" > ../layer_cdk - for file in *; do - read arn < "$file" - echo " \"$file\": \"$arn\"," >> ../layer_cdk - done - echo "}" >> ../layer_cdk - cat ../layer_cdk - - name: download aws-opentelemetry-java-layer.zip - uses: actions/download-artifact@v4 - with: - name: aws-opentelemetry-java-layer.zip - - name: rename to layer.zip - run: | - mv aws-opentelemetry-java-layer.zip layer.zip - - name: Get commit hash - id: commit - run: echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT - - name: Create Release Notes - run: | - echo "AWS OpenTelemetry Lambda Layer for Java version ${{ github.event.inputs.version }}-${{ steps.commit.outputs.sha_short }}" > release_notes.md - echo "" >> release_notes.md - echo "" >> release_notes.md - echo "See new Lambda Layer ARNs:" >> release_notes.md - echo "" >> release_notes.md - cat layer-note >> release_notes.md - echo "" >> release_notes.md - echo "Notes:" >> release_notes.md - - name: Create GH release - id: create_release - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # This token is provided by Actions, you do not need to create your own token - run: | - gh release create --target "$GITHUB_REF_NAME" \ - --title "Release lambda-v${{ github.event.inputs.version }}-${{ steps.commit.outputs.sha_short }}" \ - --notes-file release_notes.md \ - --draft \ - "lambda-v${{ github.event.inputs.version }}-${{ steps.commit.outputs.sha_short }}" \ - layer_arns.tf layer.zip - echo Removing release_notes.md ... - rm -f release_notes.md - - name: Upload layer.zip and SHA-256 checksum to SDK Release Notes (tagged with latest) - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - LATEST_SDK_VERSION=$(gh release list --repo "aws-observability/aws-otel-java-instrumentation" --json tagName,isLatest -q 'map(select(.isLatest==true)) | .[0].tagName') - # Generate SHA-256 checksum for layer.zip - shasum -a 256 layer.zip > layer.zip.sha256 - # Upload layer.zip and its checksum to the latest SDK release note - gh release upload "$LATEST_SDK_VERSION" layer.zip layer.zip.sha256 --repo "aws-observability/aws-otel-java-instrumentation" --clobber - echo "✅ layer.zip successfully uploaded to $LATEST_SDK_VERSION in the upstream repo!" 
diff --git a/.github/workflows/release-udp-exporter.yml b/.github/workflows/release-udp-exporter.yml index e200a7c3a9..262683289d 100644 --- a/.github/workflows/release-udp-exporter.yml +++ b/.github/workflows/release-udp-exporter.yml @@ -26,12 +26,12 @@ jobs: needs: validate-udp-exporter-e2e-test steps: - name: Checkout Repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 - name: Set up Java - uses: actions/setup-java@v3 + uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: - java-version: '17' + java-version-file: .java-version distribution: 'temurin' cache: 'gradle' diff --git a/.github/workflows/soak-testing.yml b/.github/workflows/soak-testing.yml index c007d3fb4e..2dcd07d709 100644 --- a/.github/workflows/soak-testing.yml +++ b/.github/workflows/soak-testing.yml @@ -63,7 +63,7 @@ jobs: run: | echo "TEST_DURATION_MINUTES=${{ github.event.inputs.test_duration_minutes || env.DEFAULT_TEST_DURATION_MINUTES }}" | tee --append $GITHUB_ENV; - name: Clone This Repo @ ${{ env.TARGET_SHA }} - uses: actions/checkout@v4 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: ref: ${{ env.TARGET_SHA }} @@ -98,7 +98,7 @@ jobs: # MARK: - Run Performance Tests - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 # v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} role-duration-seconds: 21600 # 6 Hours @@ -110,7 +110,7 @@ jobs: aws ecr-public get-login-password | docker login --username AWS --password-stdin public.ecr.aws - name: Build Sample App locally directly to the Docker daemon - uses: burrunan/gradle-cache-action@v1 + uses: burrunan/gradle-cache-action@4a07779efc8120348ea6dfd35314bc30a586eb0f #v3.0.1 with: arguments: jibDockerBuild env: @@ -210,7 +210,7 @@ jobs: git checkout main; [[ $HAS_RESULTS_ALREADY == true ]] - name: Graph and Report Performance Test Averages result - uses: benchmark-action/github-action-benchmark@v1 + uses: benchmark-action/github-action-benchmark@4bdcce38c94cec68da58d012ac24b7b1155efe8b #v1.20.7 continue-on-error: true id: check-failure-after-performance-tests with: @@ -230,7 +230,7 @@ jobs: gh-pages-branch: gh-pages benchmark-data-dir-path: soak-tests/per-commit-overall-results - name: Publish Issue if failed DURING Performance Tests - uses: JasonEtco/create-an-issue@v2 + uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2 if: ${{ github.event_name == 'schedule' && steps.check-failure-during-performance-tests.outcome == 'failure' }} env: @@ -241,7 +241,7 @@ jobs: filename: .github/auto-issue-templates/failure-during-soak_tests.md update_existing: true - name: Publish Issue if failed AFTER Performance Tests - uses: JasonEtco/create-an-issue@v2 + uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 #v2.9.2 if: ${{ github.event_name == 'schedule' && steps.check-failure-after-performance-tests.outcome == 'failure' }} env: diff --git a/.github/workflows/stale-bot.yml b/.github/workflows/stale-bot.yml index 2104ad0b4f..49ddc47c00 100644 --- a/.github/workflows/stale-bot.yml +++ b/.github/workflows/stale-bot.yml @@ -22,7 +22,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Mark the issues/pr - uses: actions/stale@v9 + uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f #10.0.0 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} #Github workflow will add a 
temporary token when executing the workflow with: diff --git a/.github/workflows/udp-exporter-e2e-test.yml b/.github/workflows/udp-exporter-e2e-test.yml index ead8a1f953..8a8b3aaa1b 100644 --- a/.github/workflows/udp-exporter-e2e-test.yml +++ b/.github/workflows/udp-exporter-e2e-test.yml @@ -13,17 +13,17 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - name: Set up Java - uses: actions/setup-java@v3 + uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: - java-version: '17' + java-version-file: .java-version distribution: 'temurin' cache: 'gradle' - name: Configure AWS credentials for Testing Tracing - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 # v5.0.0 with: role-to-assume: ${{ secrets.XRAY_UDP_EXPORTER_TEST_ROLE }} aws-region: 'us-east-1' @@ -51,8 +51,8 @@ jobs: run: | export XRAY_UDP_SPAN_EXPORTER_VERSION=${{ steps.build-udp-exporter.outputs.xrayUdpSpanExporterVersion }} echo "Running Sample App against X-Ray UDP Span Exporter version: $XRAY_UDP_SPAN_EXPORTER_VERSION" - gradle build - gradle bootRun & + ../../gradlew build + ../../gradlew bootRun & sleep 5 - name: Call Sample App Endpoint diff --git a/.gitignore b/.gitignore index 150fdce9ee..1f49e40488 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,8 @@ build/ .DS_Store +.vscode + # Ignore Gradle GUI config gradle-app.setting diff --git a/.java-version b/.java-version new file mode 100644 index 0000000000..aabe6ec390 --- /dev/null +++ b/.java-version @@ -0,0 +1 @@ +21 diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000000..b5b3250443 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,23 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +> **Note:** This CHANGELOG was created starting after version 2.11.5. Earlier changes are not documented here. + +For any change that affects end users of this package, please add an entry under the **Unreleased** section. Briefly summarize the change and provide the link to the PR. Example: + +- add SigV4 authentication for HTTP exporter + ([#1019](https://github.com/aws-observability/aws-otel-java-instrumentation/pull/1019)) + +If your change does not need a CHANGELOG entry, add the "skip changelog" label to your PR. + +## Unreleased + +### Enhancements + +- Support X-Ray Trace Id extraction from Lambda Context object, and respect user-configured OTEL_PROPAGATORS in AWS Lamdba instrumentation + ([#1191](https://github.com/aws-observability/aws-otel-java-instrumentation/pull/1191)) ([#1218](https://github.com/aws-observability/aws-otel-java-instrumentation/pull/1218)) +- Adaptive Sampling improvements: Ensure propagation of sampling rule across services and AWS accounts. Remove unnecessary B3 propagator. + ([#1201](https://github.com/aws-observability/aws-otel-java-instrumentation/pull/1201)) +- Add support for new formal database semantic convention keys. + ([#1162](https://github.com/aws-observability/aws-otel-java-instrumentation/pull/1162)) diff --git a/Dockerfile b/Dockerfile index 1390f9d2c5..f94b5ae10f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,7 +13,7 @@ # permissions and limitations under the License. 
# Stage 1: Build the cp-utility binary -FROM public.ecr.aws/docker/library/rust:1.86 as builder +FROM public.ecr.aws/docker/library/rust:1.89 AS builder WORKDIR /usr/src/cp-utility COPY ./tools/cp-utility . @@ -31,7 +31,7 @@ ARG TARGETARCH RUN if [ $TARGETARCH = "amd64" ]; then rustup component add rustfmt && cargo fmt --check ; fi ## Audit dependencies -RUN if [ $TARGETARCH = "amd64" ]; then cargo install cargo-audit && cargo audit ; fi +RUN if [ $TARGETARCH = "amd64" ]; then cargo install cargo-audit --locked && cargo audit ; fi # Cross-compile based on the target platform. diff --git a/appsignals-tests/contract-tests/build.gradle.kts b/appsignals-tests/contract-tests/build.gradle.kts index ac15939a62..0a90f08fb3 100644 --- a/appsignals-tests/contract-tests/build.gradle.kts +++ b/appsignals-tests/contract-tests/build.gradle.kts @@ -56,9 +56,9 @@ dependencies { testImplementation("software.amazon.awssdk:sts") testImplementation(kotlin("test")) implementation(project(":appsignals-tests:images:grpc:grpc-base")) - testImplementation("org.testcontainers:kafka:1.19.3") - testImplementation("org.testcontainers:postgresql:1.19.3") - testImplementation("org.testcontainers:mysql:1.19.8") + testImplementation("org.testcontainers:kafka:1.21.3") + testImplementation("org.testcontainers:postgresql:1.21.3") + testImplementation("org.testcontainers:mysql:1.21.3") testImplementation("com.mysql:mysql-connector-j:8.4.0") } diff --git a/appsignals-tests/contract-tests/src/test/java/software/amazon/opentelemetry/appsignals/test/base/JMXMetricsContractTestBase.java b/appsignals-tests/contract-tests/src/test/java/software/amazon/opentelemetry/appsignals/test/base/JMXMetricsContractTestBase.java index 0b0fd6e7cd..b0871c6679 100644 --- a/appsignals-tests/contract-tests/src/test/java/software/amazon/opentelemetry/appsignals/test/base/JMXMetricsContractTestBase.java +++ b/appsignals-tests/contract-tests/src/test/java/software/amazon/opentelemetry/appsignals/test/base/JMXMetricsContractTestBase.java @@ -60,10 +60,10 @@ protected void assertMetrics() { protected long getThreshold(String metricName) { long threshold = 0; switch (metricName) { - // If maximum memory size is undefined, then value is -1 - // https://docs.oracle.com/en/java/javase/17/docs/api/java.management/java/lang/management/MemoryUsage.html#getMax() - // Thread count can be negative when excutor is null - // https://github.com/apache/tomcat/blob/1afe41491f0e56ec0a776db5ff84607f87ce6640/java/org/apache/tomcat/util/net/AbstractEndpoint.java#L1204 + // If maximum memory size is undefined, then value is -1 + // https://docs.oracle.com/en/java/javase/17/docs/api/java.management/java/lang/management/MemoryUsage.html#getMax() + // Thread count can be negative when excutor is null + // https://github.com/apache/tomcat/blob/1afe41491f0e56ec0a776db5ff84607f87ce6640/java/org/apache/tomcat/util/net/AbstractEndpoint.java#L1204 case JMXMetricsConstants.TOMCAT_THREADS: threshold = -2; break; diff --git a/appsignals-tests/images/http-servers/tomcat/build.gradle.kts b/appsignals-tests/images/http-servers/tomcat/build.gradle.kts index a9456b57ce..c770ec4bb4 100644 --- a/appsignals-tests/images/http-servers/tomcat/build.gradle.kts +++ b/appsignals-tests/images/http-servers/tomcat/build.gradle.kts @@ -31,7 +31,7 @@ application { } dependencies { - implementation("org.apache.tomcat.embed:tomcat-embed-core:10.1.10") + implementation("org.apache.tomcat.embed:tomcat-embed-core:11.0.6") implementation("io.opentelemetry:opentelemetry-api") } diff --git 
a/appsignals-tests/images/kafka/kafka-consumers/build.gradle.kts b/appsignals-tests/images/kafka/kafka-consumers/build.gradle.kts index 3b0a88eee4..d7555f4c7b 100644 --- a/appsignals-tests/images/kafka/kafka-consumers/build.gradle.kts +++ b/appsignals-tests/images/kafka/kafka-consumers/build.gradle.kts @@ -31,7 +31,7 @@ dependencies { implementation("org.slf4j:slf4j-api:2.0.9") implementation("org.slf4j:slf4j-simple:2.0.9") testImplementation("org.junit.jupiter:junit-jupiter-api:5.9.2") - testRuntimeOnly("org.junit.jupiter:junit-jupiter-engine:5.10.1") + testRuntimeOnly("org.junit.jupiter:junit-jupiter-engine:5.13.4") } tasks.getByName("test") { diff --git a/appsignals-tests/images/kafka/kafka-producers/build.gradle.kts b/appsignals-tests/images/kafka/kafka-producers/build.gradle.kts index 57154c981b..f05d099a85 100644 --- a/appsignals-tests/images/kafka/kafka-producers/build.gradle.kts +++ b/appsignals-tests/images/kafka/kafka-producers/build.gradle.kts @@ -31,7 +31,7 @@ dependencies { implementation("org.slf4j:slf4j-api:2.0.9") implementation("org.slf4j:slf4j-simple:2.0.9") testImplementation("org.junit.jupiter:junit-jupiter-api:5.9.2") - testRuntimeOnly("org.junit.jupiter:junit-jupiter-engine:5.10.1") + testRuntimeOnly("org.junit.jupiter:junit-jupiter-engine:5.13.4") } tasks.getByName("test") { diff --git a/awsagentprovider/build.gradle.kts b/awsagentprovider/build.gradle.kts index 148a77885a..5cc97b14fc 100644 --- a/awsagentprovider/build.gradle.kts +++ b/awsagentprovider/build.gradle.kts @@ -26,8 +26,10 @@ base { dependencies { compileOnly("io.opentelemetry.javaagent:opentelemetry-javaagent-extension-api") - compileOnly("io.opentelemetry.semconv:opentelemetry-semconv:1.28.0-alpha") - testImplementation("io.opentelemetry.semconv:opentelemetry-semconv:1.28.0-alpha") + compileOnly("io.opentelemetry.semconv:opentelemetry-semconv") + compileOnly("io.opentelemetry.semconv:opentelemetry-semconv-incubating") + testImplementation("io.opentelemetry.semconv:opentelemetry-semconv") + testImplementation("io.opentelemetry.semconv:opentelemetry-semconv-incubating") compileOnly("com.google.errorprone:error_prone_annotations:2.19.1") compileOnly("io.opentelemetry:opentelemetry-sdk-extension-autoconfigure-spi") compileOnly("org.slf4j:slf4j-api") diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGenerator.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGenerator.java index a1349f06b5..d40e29a874 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGenerator.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGenerator.java @@ -15,36 +15,46 @@ package software.amazon.opentelemetry.javaagent.providers; -import static io.opentelemetry.semconv.SemanticAttributes.DB_CONNECTION_STRING; -import static io.opentelemetry.semconv.SemanticAttributes.DB_NAME; -import static io.opentelemetry.semconv.SemanticAttributes.DB_OPERATION; -import static io.opentelemetry.semconv.SemanticAttributes.DB_STATEMENT; -import static io.opentelemetry.semconv.SemanticAttributes.DB_SYSTEM; -import static io.opentelemetry.semconv.SemanticAttributes.DB_USER; -import static io.opentelemetry.semconv.SemanticAttributes.FAAS_INVOKED_NAME; -import static io.opentelemetry.semconv.SemanticAttributes.FAAS_TRIGGER; -import static io.opentelemetry.semconv.SemanticAttributes.GRAPHQL_OPERATION_TYPE; -import static 
io.opentelemetry.semconv.SemanticAttributes.HTTP_METHOD; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_REQUEST_METHOD; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_RESPONSE_STATUS_CODE; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_STATUS_CODE; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_URL; -import static io.opentelemetry.semconv.SemanticAttributes.MESSAGING_OPERATION; -import static io.opentelemetry.semconv.SemanticAttributes.MESSAGING_SYSTEM; -import static io.opentelemetry.semconv.SemanticAttributes.NETWORK_PEER_ADDRESS; -import static io.opentelemetry.semconv.SemanticAttributes.NETWORK_PEER_PORT; -import static io.opentelemetry.semconv.SemanticAttributes.NET_PEER_NAME; -import static io.opentelemetry.semconv.SemanticAttributes.NET_PEER_PORT; -import static io.opentelemetry.semconv.SemanticAttributes.NET_SOCK_PEER_ADDR; -import static io.opentelemetry.semconv.SemanticAttributes.NET_SOCK_PEER_PORT; -import static io.opentelemetry.semconv.SemanticAttributes.PEER_SERVICE; -import static io.opentelemetry.semconv.SemanticAttributes.RPC_METHOD; -import static io.opentelemetry.semconv.SemanticAttributes.RPC_SERVICE; -import static io.opentelemetry.semconv.SemanticAttributes.SERVER_ADDRESS; -import static io.opentelemetry.semconv.SemanticAttributes.SERVER_PORT; -import static io.opentelemetry.semconv.SemanticAttributes.SERVER_SOCKET_ADDRESS; -import static io.opentelemetry.semconv.SemanticAttributes.SERVER_SOCKET_PORT; -import static io.opentelemetry.semconv.SemanticAttributes.URL_FULL; +import static io.opentelemetry.semconv.DbAttributes.DB_NAMESPACE; +import static io.opentelemetry.semconv.DbAttributes.DB_OPERATION_NAME; +import static io.opentelemetry.semconv.DbAttributes.DB_QUERY_TEXT; +import static io.opentelemetry.semconv.DbAttributes.DB_SYSTEM_NAME; +import static io.opentelemetry.semconv.HttpAttributes.HTTP_REQUEST_METHOD; +import static io.opentelemetry.semconv.HttpAttributes.HTTP_RESPONSE_STATUS_CODE; +import static io.opentelemetry.semconv.NetworkAttributes.NETWORK_PEER_ADDRESS; +import static io.opentelemetry.semconv.NetworkAttributes.NETWORK_PEER_PORT; +import static io.opentelemetry.semconv.ServerAttributes.SERVER_ADDRESS; +import static io.opentelemetry.semconv.ServerAttributes.SERVER_PORT; +import static io.opentelemetry.semconv.UrlAttributes.URL_FULL; +// These DB keys have been deprecated: +// https://github.com/open-telemetry/semantic-conventions-java/blob/release/v1.34.0/semconv-incubating/src/main/java/io/opentelemetry/semconv/incubating/DbIncubatingAttributes.java#L322-L327 +// They have been replaced with new keys: +// https://github.com/open-telemetry/semantic-conventions-java/blob/release/v1.34.0/semconv/src/main/java/io/opentelemetry/semconv/DbAttributes.java#L77 +// TODO: Delete deprecated keys once they no longer exist in binding version of the upstream code. 
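// Illustrative aside, not part of the patch: the migration strategy described above is to read
// the stable key first and fall back to the deprecated incubating key only when the stable one is
// absent. A minimal sketch of that lookup, assuming the usual SpanData/AttributeKey types and
// mirroring the getKeyValueWithFallback helper that this change adds to AwsSpanProcessingUtil:
//
//   static <T> T getKeyValueWithFallback(
//       SpanData span, AttributeKey<T> key, AttributeKey<T> fallbackKey) {
//     T value = span.getAttributes().get(key);
//     if (value != null) {
//       return value;
//     }
//     return span.getAttributes().get(fallbackKey);
//   }
//
//   // For example: prefer the stable db.system.name, fall back to the deprecated db.system.
//   String dbSystem = getKeyValueWithFallback(span, DB_SYSTEM_NAME, DB_SYSTEM);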
+import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_CONNECTION_STRING; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_NAME; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_OPERATION; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_STATEMENT; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_SYSTEM; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_USER; +import static io.opentelemetry.semconv.incubating.FaasIncubatingAttributes.FAAS_INVOKED_NAME; +import static io.opentelemetry.semconv.incubating.FaasIncubatingAttributes.FAAS_TRIGGER; +import static io.opentelemetry.semconv.incubating.GraphqlIncubatingAttributes.GRAPHQL_OPERATION_TYPE; +import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_METHOD; +import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_STATUS_CODE; +import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_URL; +// https://github.com/open-telemetry/semantic-conventions-java/blob/release/v1.34.0/semconv-incubating/src/main/java/io/opentelemetry/semconv/incubating/MessagingIncubatingAttributes.java#L236-L242 +// Deprecated, use {@code messaging.operation.type} instead. +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION_TYPE; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_SYSTEM; +import static io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_PEER_NAME; +import static io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_PEER_PORT; +import static io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_SOCK_PEER_ADDR; +import static io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_SOCK_PEER_PORT; +import static io.opentelemetry.semconv.incubating.PeerIncubatingAttributes.PEER_SERVICE; +import static io.opentelemetry.semconv.incubating.RpcIncubatingAttributes.RPC_METHOD; +import static io.opentelemetry.semconv.incubating.RpcIncubatingAttributes.RPC_SERVICE; import static software.amazon.opentelemetry.javaagent.providers.AwsApplicationSignalsCustomizerProvider.LAMBDA_APPLICATION_SIGNALS_REMOTE_ENVIRONMENT; import static software.amazon.opentelemetry.javaagent.providers.AwsAttributeKeys.AWS_AGENT_ID; import static software.amazon.opentelemetry.javaagent.providers.AwsAttributeKeys.AWS_AUTH_ACCESS_KEY; @@ -86,9 +96,11 @@ import static software.amazon.opentelemetry.javaagent.providers.AwsSpanProcessingUtil.UNKNOWN_OPERATION; import static software.amazon.opentelemetry.javaagent.providers.AwsSpanProcessingUtil.UNKNOWN_REMOTE_OPERATION; import static software.amazon.opentelemetry.javaagent.providers.AwsSpanProcessingUtil.UNKNOWN_REMOTE_SERVICE; +import static software.amazon.opentelemetry.javaagent.providers.AwsSpanProcessingUtil.getKeyValueWithFallback; import static software.amazon.opentelemetry.javaagent.providers.AwsSpanProcessingUtil.isAwsSDKSpan; import static software.amazon.opentelemetry.javaagent.providers.AwsSpanProcessingUtil.isDBSpan; import static software.amazon.opentelemetry.javaagent.providers.AwsSpanProcessingUtil.isKeyPresent; +import static software.amazon.opentelemetry.javaagent.providers.AwsSpanProcessingUtil.isKeyPresentWithFallback; import com.amazonaws.arn.Arn; import 
io.opentelemetry.api.common.AttributeKey; @@ -99,8 +111,6 @@ import io.opentelemetry.sdk.trace.data.EventData; import io.opentelemetry.sdk.trace.data.ExceptionEventData; import io.opentelemetry.sdk.trace.data.SpanData; -import io.opentelemetry.semconv.ResourceAttributes; -import io.opentelemetry.semconv.SemanticAttributes; import java.lang.reflect.Method; import java.net.MalformedURLException; import java.net.URI; @@ -126,7 +136,6 @@ * represent "outgoing" traffic, and {@link SpanKind#INTERNAL} spans are ignored. */ final class AwsMetricAttributeGenerator implements MetricAttributeGenerator { - private static final Logger logger = Logger.getLogger(AwsMetricAttributeGenerator.class.getName()); @@ -280,18 +289,21 @@ private static void setRemoteServiceAndOperation(SpanData span, AttributesBuilde remoteOperation = getRemoteOperation(span, RPC_METHOD); } else if (isDBSpan(span)) { - remoteService = getRemoteService(span, DB_SYSTEM); - if (isKeyPresent(span, DB_OPERATION)) { - remoteOperation = getRemoteOperation(span, DB_OPERATION); + remoteService = getRemoteServiceWithFallback(span, DB_SYSTEM_NAME, DB_SYSTEM); + if (isKeyPresentWithFallback(span, DB_OPERATION_NAME, DB_OPERATION)) { + remoteOperation = getRemoteOperationWithFallback(span, DB_OPERATION_NAME, DB_OPERATION); } else { - remoteOperation = getDBStatementRemoteOperation(span, DB_STATEMENT); + String dbStatement = getKeyValueWithFallback(span, DB_QUERY_TEXT, DB_STATEMENT); + remoteOperation = getDBStatementRemoteOperation(span, dbStatement); } } else if (isKeyPresent(span, FAAS_INVOKED_NAME) || isKeyPresent(span, FAAS_TRIGGER)) { remoteService = getRemoteService(span, FAAS_INVOKED_NAME); remoteOperation = getRemoteOperation(span, FAAS_TRIGGER); - } else if (isKeyPresent(span, MESSAGING_SYSTEM) || isKeyPresent(span, MESSAGING_OPERATION)) { + } else if (isKeyPresent(span, MESSAGING_SYSTEM) + || isKeyPresentWithFallback(span, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION)) { remoteService = getRemoteService(span, MESSAGING_SYSTEM); - remoteOperation = getRemoteOperation(span, MESSAGING_OPERATION); + remoteOperation = + getRemoteOperationWithFallback(span, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION); } else if (isKeyPresent(span, GRAPHQL_OPERATION_TYPE)) { remoteService = GRAPHQL; remoteOperation = getRemoteOperation(span, GRAPHQL_OPERATION_TYPE); @@ -341,11 +353,8 @@ private static void setRemoteEnvironment(SpanData span, AttributesBuilder builde */ private static String generateRemoteOperation(SpanData span) { String remoteOperation = UNKNOWN_REMOTE_OPERATION; - if (isKeyPresent(span, URL_FULL) || isKeyPresent(span, HTTP_URL)) { - String httpUrl = - isKeyPresent(span, URL_FULL) - ? span.getAttributes().get(URL_FULL) - : span.getAttributes().get(HTTP_URL); + if (isKeyPresentWithFallback(span, URL_FULL, HTTP_URL)) { + String httpUrl = getKeyValueWithFallback(span, URL_FULL, HTTP_URL); try { URL url; if (httpUrl != null) { @@ -356,11 +365,8 @@ private static String generateRemoteOperation(SpanData span) { logger.log(Level.FINEST, "invalid http.url attribute: ", httpUrl); } } - if (isKeyPresent(span, HTTP_REQUEST_METHOD) || isKeyPresent(span, HTTP_METHOD)) { - String httpMethod = - isKeyPresent(span, HTTP_REQUEST_METHOD) - ? 
span.getAttributes().get(HTTP_REQUEST_METHOD) - : span.getAttributes().get(HTTP_METHOD); + if (isKeyPresentWithFallback(span, HTTP_REQUEST_METHOD, HTTP_METHOD)) { + String httpMethod = getKeyValueWithFallback(span, HTTP_REQUEST_METHOD, HTTP_METHOD); remoteOperation = httpMethod + " " + remoteOperation; } if (remoteOperation.equals(UNKNOWN_REMOTE_OPERATION)) { @@ -448,8 +454,8 @@ private static String normalizeRemoteServiceName(SpanData span, String serviceNa case "AmazonSQS": // AWS SDK v1 case "Sqs": // AWS SDK v2 return NORMALIZED_SQS_SERVICE_NAME; - // For Bedrock, Bedrock Agent, and Bedrock Agent Runtime, we can align with AWS Cloud - // Control and use AWS::Bedrock for RemoteService. + // For Bedrock, Bedrock Agent, and Bedrock Agent Runtime, we can align with AWS Cloud + // Control and use AWS::Bedrock for RemoteService. case "AmazonBedrock": // AWS SDK v1 case "Bedrock": // AWS SDK v2 case "AWSBedrockAgentRuntime": // AWS SDK v1 @@ -457,8 +463,8 @@ private static String normalizeRemoteServiceName(SpanData span, String serviceNa case "AWSBedrockAgent": // AWS SDK v1 case "BedrockAgent": // AWS SDK v2 return NORMALIZED_BEDROCK_SERVICE_NAME; - // For BedrockRuntime, we are using AWS::BedrockRuntime as the associated remote resource - // (Model) is not listed in Cloud Control. + // For BedrockRuntime, we are using AWS::BedrockRuntime as the associated remote resource + // (Model) is not listed in Cloud Control. case "AmazonBedrockRuntime": // AWS SDK v1 case "BedrockRuntime": // AWS SDK v2 return NORMALIZED_BEDROCK_RUNTIME_SERVICE_NAME; @@ -768,7 +774,7 @@ private static Optional getSnsResourceNameFromArn(Optional strin * {address} attribute is retrieved in priority order: * - {@link SemanticAttributes#SERVER_ADDRESS}, * - {@link SemanticAttributes#NET_PEER_NAME}, - * - {@link SemanticAttributes#SERVER_SOCKET_ADDRESS} + * - {@link SemanticAttributes#NETWORK_PEER_ADDRESS} * - {@link SemanticAttributes#DB_CONNECTION_STRING}-Hostname * * @@ -776,7 +782,7 @@ private static Optional getSnsResourceNameFromArn(Optional strin * {port} attribute is retrieved in priority order: * - {@link SemanticAttributes#SERVER_PORT}, * - {@link SemanticAttributes#NET_PEER_PORT}, - * - {@link SemanticAttributes#SERVER_SOCKET_PORT} + * - {@link SemanticAttributes#NETWORK_PEER_PORT} * - {@link SemanticAttributes#DB_CONNECTION_STRING}-Port * * @@ -784,7 +790,7 @@ private static Optional getSnsResourceNameFromArn(Optional strin * provided. 
*/ private static Optional getDbConnection(SpanData span) { - String dbName = span.getAttributes().get(DB_NAME); + String dbName = getKeyValueWithFallback(span, DB_NAMESPACE, DB_NAME); Optional dbConnection = Optional.empty(); if (isKeyPresent(span, SERVER_ADDRESS)) { @@ -795,9 +801,9 @@ private static Optional getDbConnection(SpanData span) { String networkPeerAddress = span.getAttributes().get(NET_PEER_NAME); Long networkPeerPort = span.getAttributes().get(NET_PEER_PORT); dbConnection = buildDbConnection(networkPeerAddress, networkPeerPort); - } else if (isKeyPresent(span, SERVER_SOCKET_ADDRESS)) { - String serverSocketAddress = span.getAttributes().get(SERVER_SOCKET_ADDRESS); - Long serverSocketPort = span.getAttributes().get(SERVER_SOCKET_PORT); + } else if (isKeyPresent(span, NETWORK_PEER_ADDRESS)) { + String serverSocketAddress = span.getAttributes().get(NETWORK_PEER_ADDRESS); + Long serverSocketPort = span.getAttributes().get(NETWORK_PEER_PORT); dbConnection = buildDbConnection(serverSocketAddress, serverSocketPort); } else if (isKeyPresent(span, DB_CONNECTION_STRING)) { String connectionString = span.getAttributes().get(DB_CONNECTION_STRING); @@ -942,6 +948,17 @@ private static String getRemoteService(SpanData span, AttributeKey remot return remoteService; } + static String getRemoteServiceWithFallback( + SpanData span, + AttributeKey remoteServiceKey, + AttributeKey remoteServiceFallbackKey) { + String remoteService = span.getAttributes().get(remoteServiceKey); + if (remoteService == null) { + return getRemoteService(span, remoteServiceFallbackKey); + } + return remoteService; + } + private static String getRemoteOperation(SpanData span, AttributeKey remoteOperationKey) { String remoteOperation = span.getAttributes().get(remoteOperationKey); if (remoteOperation == null) { @@ -950,15 +967,23 @@ private static String getRemoteOperation(SpanData span, AttributeKey rem return remoteOperation; } + static String getRemoteOperationWithFallback( + SpanData span, AttributeKey remoteOpKey, AttributeKey remoteOpFallbackKey) { + String remoteOp = span.getAttributes().get(remoteOpKey); + if (remoteOp == null) { + return getRemoteOperation(span, remoteOpFallbackKey); + } + return remoteOp; + } + /** * If no db.operation attribute provided in the span, we use db.statement to compute a valid * remote operation in a best-effort manner. To do this, we take the first substring of the * statement and compare to a regex list of known SQL keywords. The substring length is determined * by the longest known SQL keywords. 
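 *
 * <p>A rough sketch of that idea (illustrative only: the keyword list and regex below are a small
 * subset, not the agent's actual list; {@code UNKNOWN_REMOTE_OPERATION} is the constant from
 * AwsSpanProcessingUtil, and java.util.regex.Pattern/Matcher plus java.util.Locale are assumed to
 * be imported):
 *
 * <pre>{@code
 * private static final Pattern SQL_KEYWORD =
 *     Pattern.compile(
 *         "^(?:DROP VIEW|DROP TABLE|SELECT|INSERT|UPDATE|DELETE|DROP)\\b",
 *         Pattern.CASE_INSENSITIVE);
 *
 * static String extractDbOperation(String statement) {
 *   if (statement == null) {
 *     return UNKNOWN_REMOTE_OPERATION;
 *   }
 *   Matcher matcher = SQL_KEYWORD.matcher(statement.trim());
 *   // Longer keywords come first in the pattern, so "DROP VIEW ..." yields "DROP VIEW", not "DROP".
 *   return matcher.find() ? matcher.group().toUpperCase(Locale.ROOT) : UNKNOWN_REMOTE_OPERATION;
 * }
 * }</pre>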
*/ - private static String getDBStatementRemoteOperation( - SpanData span, AttributeKey remoteOperationKey) { - String remoteOperation = span.getAttributes().get(remoteOperationKey); + private static String getDBStatementRemoteOperation(SpanData span, String dbStatement) { + String remoteOperation = dbStatement; if (remoteOperation == null) { remoteOperation = UNKNOWN_REMOTE_OPERATION; } diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsResourceAttributeConfigurator.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsResourceAttributeConfigurator.java index d2decdc16c..01132a1919 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsResourceAttributeConfigurator.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsResourceAttributeConfigurator.java @@ -15,7 +15,7 @@ package software.amazon.opentelemetry.javaagent.providers; -import static io.opentelemetry.semconv.ResourceAttributes.SERVICE_NAME; +import static io.opentelemetry.semconv.ServiceAttributes.SERVICE_NAME; import static software.amazon.opentelemetry.javaagent.providers.AwsAttributeKeys.AWS_LOCAL_SERVICE; import static software.amazon.opentelemetry.javaagent.providers.AwsSpanProcessingUtil.UNKNOWN_SERVICE; diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessor.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessor.java index f6087017c5..c73e20e6dc 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessor.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessor.java @@ -15,10 +15,10 @@ package software.amazon.opentelemetry.javaagent.providers; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_RESPONSE_STATUS_CODE; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_STATUS_CODE; +import static io.opentelemetry.semconv.HttpAttributes.HTTP_RESPONSE_STATUS_CODE; +import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_STATUS_CODE; import static software.amazon.opentelemetry.javaagent.providers.AwsAttributeKeys.AWS_REMOTE_SERVICE; -import static software.amazon.opentelemetry.javaagent.providers.AwsSpanProcessingUtil.isKeyPresent; +import static software.amazon.opentelemetry.javaagent.providers.AwsSpanProcessingUtil.getKeyValueWithFallback; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.metrics.DoubleHistogram; @@ -152,12 +152,8 @@ public boolean isEndRequired() { // possible except for the throttle // https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/exporter/awsxrayexporter/internal/translator/cause.go#L121-L160 private void recordErrorOrFault(SpanData spanData, Attributes attributes) { - Long httpStatusCode = null; - if (isKeyPresent(spanData, HTTP_RESPONSE_STATUS_CODE)) { - httpStatusCode = spanData.getAttributes().get(HTTP_RESPONSE_STATUS_CODE); - } else if (isKeyPresent(spanData, HTTP_STATUS_CODE)) { - httpStatusCode = spanData.getAttributes().get(HTTP_STATUS_CODE); - } + Long httpStatusCode = + getKeyValueWithFallback(spanData, HTTP_RESPONSE_STATUS_CODE, HTTP_STATUS_CODE); StatusCode statusCode = spanData.getStatus().getStatusCode(); if (httpStatusCode == null) { diff --git 
a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtil.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtil.java index 539c863e23..ab92d050be 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtil.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtil.java @@ -15,16 +15,20 @@ package software.amazon.opentelemetry.javaagent.providers; -import static io.opentelemetry.semconv.SemanticAttributes.DB_OPERATION; -import static io.opentelemetry.semconv.SemanticAttributes.DB_STATEMENT; -import static io.opentelemetry.semconv.SemanticAttributes.DB_SYSTEM; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_METHOD; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_REQUEST_METHOD; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_TARGET; -import static io.opentelemetry.semconv.SemanticAttributes.MESSAGING_OPERATION; -import static io.opentelemetry.semconv.SemanticAttributes.MessagingOperationValues.PROCESS; -import static io.opentelemetry.semconv.SemanticAttributes.RPC_SYSTEM; -import static io.opentelemetry.semconv.SemanticAttributes.URL_PATH; +import static io.opentelemetry.semconv.DbAttributes.DB_OPERATION_NAME; +import static io.opentelemetry.semconv.DbAttributes.DB_QUERY_TEXT; +import static io.opentelemetry.semconv.DbAttributes.DB_SYSTEM_NAME; +import static io.opentelemetry.semconv.UrlAttributes.URL_PATH; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_OPERATION; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_STATEMENT; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_SYSTEM; +import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_METHOD; +import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_REQUEST_METHOD; +import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_TARGET; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION_TYPE; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MessagingOperationTypeIncubatingValues.PROCESS; +import static io.opentelemetry.semconv.incubating.RpcIncubatingAttributes.RPC_SYSTEM; import static software.amazon.opentelemetry.javaagent.providers.AwsApplicationSignalsCustomizerProvider.AWS_LAMBDA_FUNCTION_NAME_CONFIG; import static software.amazon.opentelemetry.javaagent.providers.AwsApplicationSignalsCustomizerProvider.isLambdaEnvironment; import static software.amazon.opentelemetry.javaagent.providers.AwsAttributeKeys.AWS_LAMBDA_LOCAL_OPERATION_OVERRIDE; @@ -113,6 +117,10 @@ static String getIngressOperation(SpanData span) { if (operationOverride != null) { return operationOverride; } + String op = generateIngressOperation(span); + if (!op.equals(UNKNOWN_OPERATION)) { + return op; + } return getFunctionNameFromEnv() + "/FunctionHandler"; } String operation = span.getName(); @@ -158,6 +166,23 @@ static boolean isKeyPresent(SpanData span, AttributeKey key) { return span.getAttributes().get(key) != null; } + static boolean isKeyPresentWithFallback( + SpanData span, AttributeKey key, AttributeKey fallbackKey) { + if (span.getAttributes().get(key) != null) { + return 
true; + } + return isKeyPresent(span, fallbackKey); + } + + static T getKeyValueWithFallback( + SpanData span, AttributeKey key, AttributeKey fallbackKey) { + T value = span.getAttributes().get(key); + if (value != null) { + return value; + } + return span.getAttributes().get(fallbackKey); + } + static boolean isAwsSDKSpan(SpanData span) { // https://opentelemetry.io/docs/specs/otel/trace/semantic_conventions/instrumentation/aws-sdk/#common-attributes return "aws-api".equals(span.getAttributes().get(RPC_SYSTEM)); @@ -175,7 +200,8 @@ static boolean shouldGenerateDependencyMetricAttributes(SpanData span) { } static boolean isConsumerProcessSpan(SpanData spanData) { - String messagingOperation = spanData.getAttributes().get(MESSAGING_OPERATION); + String messagingOperation = + getKeyValueWithFallback(spanData, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION); return SpanKind.CONSUMER.equals(spanData.getKind()) && PROCESS.equals(messagingOperation); } @@ -197,7 +223,8 @@ static boolean isLocalRoot(SpanData spanData) { private static boolean isSqsReceiveMessageConsumerSpan(SpanData spanData) { String spanName = spanData.getName(); SpanKind spanKind = spanData.getKind(); - String messagingOperation = spanData.getAttributes().get(MESSAGING_OPERATION); + String messagingOperation = + getKeyValueWithFallback(spanData, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION); InstrumentationScopeInfo instrumentationScopeInfo = spanData.getInstrumentationScopeInfo(); return SQS_RECEIVE_MESSAGE_SPAN_NAME.equalsIgnoreCase(spanName) @@ -271,9 +298,9 @@ private static String generateIngressOperation(SpanData span) { // Check if the current Span adheres to database semantic conventions static boolean isDBSpan(SpanData span) { - return isKeyPresent(span, DB_SYSTEM) - || isKeyPresent(span, DB_OPERATION) - || isKeyPresent(span, DB_STATEMENT); + return isKeyPresentWithFallback(span, DB_SYSTEM_NAME, DB_SYSTEM) + || isKeyPresentWithFallback(span, DB_OPERATION_NAME, DB_OPERATION) + || isKeyPresentWithFallback(span, DB_QUERY_TEXT, DB_STATEMENT); } static boolean isLambdaServerSpan(ReadableSpan span) { diff --git a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AttributePropagatingSpanProcessorTest.java b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AttributePropagatingSpanProcessorTest.java index 102f411013..443fca8b49 100644 --- a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AttributePropagatingSpanProcessorTest.java +++ b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AttributePropagatingSpanProcessorTest.java @@ -15,9 +15,9 @@ package software.amazon.opentelemetry.javaagent.providers; -import static io.opentelemetry.semconv.SemanticAttributes.MESSAGING_OPERATION; -import static io.opentelemetry.semconv.SemanticAttributes.MessagingOperationValues.PROCESS; -import static io.opentelemetry.semconv.SemanticAttributes.RPC_SYSTEM; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MessagingOperationTypeIncubatingValues.PROCESS; +import static io.opentelemetry.semconv.incubating.RpcIncubatingAttributes.RPC_SYSTEM; import static org.assertj.core.api.Assertions.assertThat; import io.opentelemetry.api.common.AttributeKey; diff --git a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGeneratorTest.java 
b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGeneratorTest.java index 8b362193df..5a20b1efe2 100644 --- a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGeneratorTest.java +++ b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGeneratorTest.java @@ -15,9 +15,44 @@ package software.amazon.opentelemetry.javaagent.providers; -import static io.opentelemetry.semconv.ResourceAttributes.SERVICE_NAME; -import static io.opentelemetry.semconv.SemanticAttributes.*; -import static io.opentelemetry.semconv.SemanticAttributes.MessagingOperationValues.PROCESS; +import static io.opentelemetry.semconv.DbAttributes.DB_NAMESPACE; +import static io.opentelemetry.semconv.DbAttributes.DB_OPERATION_NAME; +import static io.opentelemetry.semconv.DbAttributes.DB_QUERY_TEXT; +import static io.opentelemetry.semconv.DbAttributes.DB_SYSTEM_NAME; +import static io.opentelemetry.semconv.HttpAttributes.HTTP_REQUEST_METHOD; +import static io.opentelemetry.semconv.HttpAttributes.HTTP_RESPONSE_STATUS_CODE; +import static io.opentelemetry.semconv.NetworkAttributes.NETWORK_PEER_ADDRESS; +import static io.opentelemetry.semconv.NetworkAttributes.NETWORK_PEER_PORT; +import static io.opentelemetry.semconv.ServerAttributes.SERVER_ADDRESS; +import static io.opentelemetry.semconv.ServerAttributes.SERVER_PORT; +import static io.opentelemetry.semconv.ServiceAttributes.SERVICE_NAME; +import static io.opentelemetry.semconv.UrlAttributes.URL_FULL; +import static io.opentelemetry.semconv.UrlAttributes.URL_PATH; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_CONNECTION_STRING; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_NAME; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_OPERATION; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_STATEMENT; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_SYSTEM; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_USER; +import static io.opentelemetry.semconv.incubating.FaasIncubatingAttributes.FAAS_INVOKED_NAME; +import static io.opentelemetry.semconv.incubating.FaasIncubatingAttributes.FAAS_INVOKED_PROVIDER; +import static io.opentelemetry.semconv.incubating.FaasIncubatingAttributes.FAAS_TRIGGER; +import static io.opentelemetry.semconv.incubating.GraphqlIncubatingAttributes.GRAPHQL_OPERATION_TYPE; +import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_METHOD; +import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_TARGET; +import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_URL; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION_TYPE; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_SYSTEM; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MessagingOperationTypeIncubatingValues.PROCESS; +import static io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_PEER_NAME; +import static io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_PEER_PORT; +import static io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_SOCK_PEER_ADDR; +import static 
io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_SOCK_PEER_PORT; +import static io.opentelemetry.semconv.incubating.PeerIncubatingAttributes.PEER_SERVICE; +import static io.opentelemetry.semconv.incubating.RpcIncubatingAttributes.RPC_METHOD; +import static io.opentelemetry.semconv.incubating.RpcIncubatingAttributes.RPC_SERVICE; +import static io.opentelemetry.semconv.incubating.RpcIncubatingAttributes.RPC_SYSTEM; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -500,6 +535,8 @@ public void testRemoteAttributesCombinations() { // Validate behaviour of various combinations of DB attributes, then remove them. validateAndRemoveRemoteAttributes(DB_SYSTEM, "DB system", DB_OPERATION, "DB operation"); + validateAndRemoveRemoteAttributes( + DB_SYSTEM_NAME, "DB system name", DB_OPERATION_NAME, "DB operation name"); // Validate db.operation not exist, but db.statement exist, where SpanAttributes.DB_STATEMENT is // invalid @@ -509,6 +546,10 @@ public void testRemoteAttributesCombinations() { validateAndRemoveRemoteAttributes( DB_SYSTEM, "DB system", DB_OPERATION, UNKNOWN_REMOTE_OPERATION); + mockAttribute(DB_SYSTEM_NAME, "DB system name"); + validateAndRemoveRemoteAttributes( + DB_SYSTEM_NAME, "DB system name", DB_OPERATION_NAME, UNKNOWN_REMOTE_OPERATION); + // Validate both db.operation and db.statement not exist. mockAttribute(DB_SYSTEM, "DB system"); mockAttribute(DB_OPERATION, null); @@ -516,6 +557,10 @@ public void testRemoteAttributesCombinations() { validateAndRemoveRemoteAttributes( DB_SYSTEM, "DB system", DB_OPERATION, UNKNOWN_REMOTE_OPERATION); + mockAttribute(DB_SYSTEM_NAME, "DB system name"); + validateAndRemoveRemoteAttributes( + DB_SYSTEM_NAME, "DB system name", DB_OPERATION_NAME, UNKNOWN_REMOTE_OPERATION); + // Validate behaviour of various combinations of FAAS attributes, then remove them. validateAndRemoveRemoteAttributes( FAAS_INVOKED_NAME, "FAAS invoked name", FAAS_TRIGGER, "FAAS trigger name"); @@ -675,12 +720,22 @@ public void testGetDBStatementRemoteOperation() { mockAttribute(DB_STATEMENT, "SELECT DB statement"); mockAttribute(DB_OPERATION, null); validateExpectedRemoteAttributes("DB system", "SELECT"); + mockAttribute(DB_SYSTEM_NAME, "DB system name"); + mockAttribute(DB_QUERY_TEXT, "UPDATE DB statement"); + validateExpectedRemoteAttributes("DB system name", "UPDATE"); + mockAttribute(DB_SYSTEM_NAME, null); + mockAttribute(DB_QUERY_TEXT, null); // Case 2: More than 1 valid keywords match, we want to pick the longest match mockAttribute(DB_SYSTEM, "DB system"); mockAttribute(DB_STATEMENT, "DROP VIEW DB statement"); mockAttribute(DB_OPERATION, null); validateExpectedRemoteAttributes("DB system", "DROP VIEW"); + mockAttribute(DB_SYSTEM_NAME, "DB system name"); + mockAttribute(DB_QUERY_TEXT, "DROP TABLE myTable"); + validateExpectedRemoteAttributes("DB system name", "DROP TABLE"); + mockAttribute(DB_SYSTEM_NAME, null); + mockAttribute(DB_QUERY_TEXT, null); // Case 3: More than 1 valid keywords match, but the other keywords is not // at the start of the SpanAttributes.DB_STATEMENT. 
We want to only pick start match @@ -688,48 +743,90 @@ public void testGetDBStatementRemoteOperation() { mockAttribute(DB_STATEMENT, "SELECT data FROM domains"); mockAttribute(DB_OPERATION, null); validateExpectedRemoteAttributes("DB system", "SELECT"); + mockAttribute(DB_SYSTEM_NAME, "DB system name"); + mockAttribute(DB_QUERY_TEXT, "UPDATE domains SET data = 'newdomain.com' WHERE id = 1"); + validateExpectedRemoteAttributes("DB system name", "UPDATE"); + mockAttribute(DB_SYSTEM_NAME, null); + mockAttribute(DB_QUERY_TEXT, null); // Case 4: Have valid keywords,but it is not at the start of SpanAttributes.DB_STATEMENT mockAttribute(DB_SYSTEM, "DB system"); mockAttribute(DB_STATEMENT, "invalid SELECT DB statement"); mockAttribute(DB_OPERATION, null); validateExpectedRemoteAttributes("DB system", UNKNOWN_REMOTE_OPERATION); + mockAttribute(DB_SYSTEM_NAME, "DB system name"); + mockAttribute(DB_QUERY_TEXT, "bad sql UPDATE domains SET data = 'newdomain.com' WHERE id = 1"); + validateExpectedRemoteAttributes("DB system name", UNKNOWN_REMOTE_OPERATION); + mockAttribute(DB_SYSTEM_NAME, null); + mockAttribute(DB_QUERY_TEXT, null); // Case 5: Have valid keywords, match the longest word mockAttribute(DB_SYSTEM, "DB system"); mockAttribute(DB_STATEMENT, "UUID"); mockAttribute(DB_OPERATION, null); validateExpectedRemoteAttributes("DB system", "UUID"); + mockAttribute(DB_SYSTEM_NAME, "DB system name"); + mockAttribute(DB_QUERY_TEXT, "COUNT"); + validateExpectedRemoteAttributes("DB system name", "COUNT"); + mockAttribute(DB_SYSTEM_NAME, null); + mockAttribute(DB_QUERY_TEXT, null); // Case 6: Have valid keywords, match with first word mockAttribute(DB_SYSTEM, "DB system"); mockAttribute(DB_STATEMENT, "FROM SELECT *"); mockAttribute(DB_OPERATION, null); validateExpectedRemoteAttributes("DB system", "FROM"); + mockAttribute(DB_SYSTEM_NAME, "DB system name"); + mockAttribute(DB_QUERY_TEXT, "COUNT SELECT *"); + validateExpectedRemoteAttributes("DB system name", "COUNT"); + mockAttribute(DB_SYSTEM_NAME, null); + mockAttribute(DB_QUERY_TEXT, null); // Case 7: Have valid keyword, match with first word mockAttribute(DB_SYSTEM, "DB system"); mockAttribute(DB_STATEMENT, "SELECT FROM *"); mockAttribute(DB_OPERATION, null); validateExpectedRemoteAttributes("DB system", "SELECT"); + mockAttribute(DB_SYSTEM_NAME, "DB system name"); + mockAttribute(DB_QUERY_TEXT, "SELECT COUNT(*) FROM domains"); + validateExpectedRemoteAttributes("DB system name", "SELECT"); + mockAttribute(DB_SYSTEM_NAME, null); + mockAttribute(DB_QUERY_TEXT, null); // Case 8: Have valid keywords, match with upper case mockAttribute(DB_SYSTEM, "DB system"); mockAttribute(DB_STATEMENT, "seLeCt *"); mockAttribute(DB_OPERATION, null); validateExpectedRemoteAttributes("DB system", "SELECT"); + mockAttribute(DB_SYSTEM_NAME, "DB system name"); + mockAttribute(DB_QUERY_TEXT, "upDate domains SET data = 'newdomain.com' WHERE id = 1"); + validateExpectedRemoteAttributes("DB system name", "UPDATE"); + mockAttribute(DB_SYSTEM_NAME, null); + mockAttribute(DB_QUERY_TEXT, null); // Case 9: Both DB_OPERATION and DB_STATEMENT are set but the former takes precedence mockAttribute(DB_SYSTEM, "DB system"); mockAttribute(DB_STATEMENT, "SELECT FROM *"); mockAttribute(DB_OPERATION, "DB operation"); validateExpectedRemoteAttributes("DB system", "DB operation"); + mockAttribute(DB_SYSTEM_NAME, "DB system name"); + mockAttribute(DB_QUERY_TEXT, "INSERT INTO mytable VALUES ('newdomain.com')"); + mockAttribute(DB_OPERATION_NAME, "DB operation name"); + 
validateExpectedRemoteAttributes("DB system name", "DB operation name"); + mockAttribute(DB_SYSTEM_NAME, null); + mockAttribute(DB_QUERY_TEXT, null); + mockAttribute(DB_OPERATION_NAME, null); // Case 10: Duplicate of case 1 with leading whitespace mockAttribute(DB_SYSTEM, "DB system"); mockAttribute(DB_STATEMENT, " SELECT DB statement"); mockAttribute(DB_OPERATION, null); validateExpectedRemoteAttributes("DB system", "SELECT"); + mockAttribute(DB_SYSTEM_NAME, "DB system name"); + mockAttribute(DB_QUERY_TEXT, " UPDATE DB statement"); + validateExpectedRemoteAttributes("DB system name", "UPDATE"); + mockAttribute(DB_SYSTEM_NAME, null); + mockAttribute(DB_QUERY_TEXT, null); // Case 11: Duplicate of case 2 with leading whitespace. Test if whitespace affects longest // match @@ -737,6 +834,11 @@ public void testGetDBStatementRemoteOperation() { mockAttribute(DB_STATEMENT, " DROP VIEW DB statement"); mockAttribute(DB_OPERATION, null); validateExpectedRemoteAttributes("DB system", "DROP VIEW"); + mockAttribute(DB_SYSTEM_NAME, "DB system name"); + mockAttribute(DB_QUERY_TEXT, " DROP TABLE tableFoo"); + validateExpectedRemoteAttributes("DB system name", "DROP TABLE"); + mockAttribute(DB_SYSTEM_NAME, null); + mockAttribute(DB_QUERY_TEXT, null); } @Test @@ -1159,7 +1261,10 @@ public void testDBClientSpanWithRemoteResourceAttributes() { mockAttribute(SERVER_ADDRESS, "abc.com"); mockAttribute(SERVER_PORT, 3306L); validateRemoteResourceAttributes("DB::Connection", "db_name|abc.com|3306"); + mockAttribute(DB_NAMESPACE, "db_namespace"); + validateRemoteResourceAttributes("DB::Connection", "db_namespace|abc.com|3306"); mockAttribute(DB_NAME, null); + mockAttribute(DB_NAMESPACE, null); mockAttribute(SERVER_ADDRESS, null); mockAttribute(SERVER_PORT, null); @@ -1169,7 +1274,10 @@ public void testDBClientSpanWithRemoteResourceAttributes() { mockAttribute(SERVER_ADDRESS, "abc.com"); mockAttribute(SERVER_PORT, 3306L); validateRemoteResourceAttributes("DB::Connection", "db_name^|special|abc.com|3306"); + mockAttribute(DB_NAMESPACE, "db_namespace|special"); + validateRemoteResourceAttributes("DB::Connection", "db_namespace^|special|abc.com|3306"); mockAttribute(DB_NAME, null); + mockAttribute(DB_NAMESPACE, null); mockAttribute(SERVER_ADDRESS, null); mockAttribute(SERVER_PORT, null); @@ -1179,7 +1287,10 @@ public void testDBClientSpanWithRemoteResourceAttributes() { mockAttribute(SERVER_ADDRESS, "abc.com"); mockAttribute(SERVER_PORT, 3306L); validateRemoteResourceAttributes("DB::Connection", "db_name^^special|abc.com|3306"); + mockAttribute(DB_NAMESPACE, "db_namespace^special"); + validateRemoteResourceAttributes("DB::Connection", "db_namespace^^special|abc.com|3306"); mockAttribute(DB_NAME, null); + mockAttribute(DB_NAMESPACE, null); mockAttribute(SERVER_ADDRESS, null); mockAttribute(SERVER_PORT, null); @@ -1187,7 +1298,10 @@ public void testDBClientSpanWithRemoteResourceAttributes() { mockAttribute(DB_NAME, "db_name"); mockAttribute(SERVER_ADDRESS, "abc.com"); validateRemoteResourceAttributes("DB::Connection", "db_name|abc.com"); + mockAttribute(DB_NAMESPACE, "db_namespace"); + validateRemoteResourceAttributes("DB::Connection", "db_namespace|abc.com"); mockAttribute(DB_NAME, null); + mockAttribute(DB_NAMESPACE, null); mockAttribute(SERVER_ADDRESS, null); // Validate behaviour of SERVER_ADDRESS exist, then remove it. 
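
(Aside on the DB-statement cases above: when db.operation / db.operation.name is present it wins outright (case 9); otherwise the remote operation is derived from db.statement / db.query.text by trimming, comparing case-insensitively, accepting only a keyword that starts the statement, and preferring the longest such keyword. The sketch below is a self-contained illustration of that selection rule; the keyword list and the "UNKNOWN" placeholder are stand-ins, not the agent's full SQL dialect list or its UNKNOWN_REMOTE_OPERATION constant.)

```java
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Locale;
import java.util.Optional;

// Illustrative sketch of the keyword-matching rule the test cases above pin down.
public final class SqlOperationSketch {
  private static final String UNKNOWN = "UNKNOWN"; // placeholder for the agent's unknown-operation constant
  private static final List<String> KEYWORDS =
      Arrays.asList("DROP VIEW", "DROP TABLE", "SELECT", "UPDATE", "INSERT", "FROM", "COUNT", "UUID");

  static String operationFromStatement(String statement) {
    String normalized = statement.trim().toUpperCase(Locale.ROOT);
    Optional<String> match =
        KEYWORDS.stream()
            .filter(normalized::startsWith)                 // keyword must start the statement
            .max(Comparator.comparingInt(String::length));  // longest match wins
    return match.orElse(UNKNOWN);
  }

  public static void main(String[] args) {
    System.out.println(operationFromStatement("  DROP TABLE tableFoo"));                 // DROP TABLE
    System.out.println(operationFromStatement("seLeCt *"));                              // SELECT
    System.out.println(operationFromStatement("bad sql UPDATE domains SET data = 'x'")); // UNKNOWN
  }
}
```

(continuation of the diff:)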
@@ -1209,7 +1323,10 @@ public void testDBClientSpanWithRemoteResourceAttributes() { mockAttribute(NET_PEER_NAME, "abc.com"); mockAttribute(NET_PEER_PORT, 3306L); validateRemoteResourceAttributes("DB::Connection", "db_name|abc.com|3306"); + mockAttribute(DB_NAMESPACE, "db_namespace"); + validateRemoteResourceAttributes("DB::Connection", "db_namespace|abc.com|3306"); mockAttribute(DB_NAME, null); + mockAttribute(DB_NAMESPACE, null); mockAttribute(NET_PEER_NAME, null); mockAttribute(NET_PEER_PORT, null); @@ -1217,7 +1334,10 @@ public void testDBClientSpanWithRemoteResourceAttributes() { mockAttribute(DB_NAME, "db_name"); mockAttribute(NET_PEER_NAME, "abc.com"); validateRemoteResourceAttributes("DB::Connection", "db_name|abc.com"); + mockAttribute(DB_NAMESPACE, "db_namespace"); + validateRemoteResourceAttributes("DB::Connection", "db_namespace|abc.com"); mockAttribute(DB_NAME, null); + mockAttribute(DB_NAMESPACE, null); mockAttribute(NET_PEER_NAME, null); // Validate behaviour of NET_PEER_NAME exist, then remove it. @@ -1234,31 +1354,37 @@ public void testDBClientSpanWithRemoteResourceAttributes() { assertThat(actualAttributes.get(AWS_REMOTE_RESOURCE_IDENTIFIER)).isNull(); mockAttribute(NET_PEER_PORT, null); - // Validate behaviour of DB_NAME, SERVER_SOCKET_ADDRESS and SERVER_SOCKET_PORT exist, then + // Validate behaviour of DB_NAME, NETWORK_PEER_ADDRESS and NETWORK_PEER_PORT exist, then // remove it. mockAttribute(DB_NAME, "db_name"); - mockAttribute(SERVER_SOCKET_ADDRESS, "abc.com"); - mockAttribute(SERVER_SOCKET_PORT, 3306L); + mockAttribute(NETWORK_PEER_ADDRESS, "abc.com"); + mockAttribute(NETWORK_PEER_PORT, 3306L); validateRemoteResourceAttributes("DB::Connection", "db_name|abc.com|3306"); + mockAttribute(DB_NAMESPACE, "db_namespace"); + validateRemoteResourceAttributes("DB::Connection", "db_namespace|abc.com|3306"); mockAttribute(DB_NAME, null); - mockAttribute(SERVER_SOCKET_ADDRESS, null); - mockAttribute(SERVER_SOCKET_PORT, null); + mockAttribute(DB_NAMESPACE, null); + mockAttribute(NETWORK_PEER_ADDRESS, null); + mockAttribute(NETWORK_PEER_PORT, null); - // Validate behaviour of DB_NAME, SERVER_SOCKET_ADDRESS exist, then remove it. + // Validate behaviour of DB_NAME, NETWORK_PEER_ADDRESS exist, then remove it. mockAttribute(DB_NAME, "db_name"); - mockAttribute(SERVER_SOCKET_ADDRESS, "abc.com"); + mockAttribute(NETWORK_PEER_ADDRESS, "abc.com"); validateRemoteResourceAttributes("DB::Connection", "db_name|abc.com"); + mockAttribute(DB_NAMESPACE, "db_namespace"); + validateRemoteResourceAttributes("DB::Connection", "db_namespace|abc.com"); mockAttribute(DB_NAME, null); - mockAttribute(SERVER_SOCKET_ADDRESS, null); + mockAttribute(DB_NAMESPACE, null); + mockAttribute(NETWORK_PEER_ADDRESS, null); - // Validate behaviour of SERVER_SOCKET_PORT exist, then remove it. - mockAttribute(SERVER_SOCKET_PORT, 3306L); + // Validate behaviour of NETWORK_PEER_PORT exist, then remove it. + mockAttribute(NETWORK_PEER_PORT, 3306L); when(spanDataMock.getKind()).thenReturn(SpanKind.CLIENT); actualAttributes = GENERATOR.generateMetricAttributeMapFromSpan(spanDataMock, resource).get(DEPENDENCY_METRIC); assertThat(actualAttributes.get(AWS_REMOTE_RESOURCE_TYPE)).isNull(); assertThat(actualAttributes.get(AWS_REMOTE_RESOURCE_IDENTIFIER)).isNull(); - mockAttribute(SERVER_SOCKET_PORT, null); + mockAttribute(NETWORK_PEER_PORT, null); // Validate behaviour of only DB_NAME exist, then remove it. 
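
(Aside on the remote-resource assertions above: expected identifiers such as "db_namespace|abc.com|3306" and "db_namespace^|special|abc.com|3306" show that the identifier joins database name, host, and port with '|' and escapes a literal '|' or '^' inside a component with '^'. The sketch below reproduces only those tested outputs; the helper names and the exact escape order are assumptions for illustration.)

```java
// Rough, self-contained sketch of the identifier assembly the tests above assert on.
public final class RemoteResourceIdentifierSketch {

  // Assumed escape order: '^' first, then '|'; only the outputs below are taken from the diff.
  static String escape(String component) {
    return component.replace("^", "^^").replace("|", "^|");
  }

  static String identifier(String dbNamespace, String host, Long port) {
    StringBuilder sb = new StringBuilder(escape(dbNamespace)).append('|').append(host);
    if (port != null) {
      sb.append('|').append(port);
    }
    return sb.toString();
  }

  public static void main(String[] args) {
    System.out.println(identifier("db_namespace", "abc.com", 3306L));         // db_namespace|abc.com|3306
    System.out.println(identifier("db_namespace|special", "abc.com", 3306L)); // db_namespace^|special|abc.com|3306
    System.out.println(identifier("db_namespace^special", "abc.com", 3306L)); // db_namespace^^special|abc.com|3306
  }
}
```

(continuation of the diff:)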
mockAttribute(DB_NAME, "db_name"); @@ -1267,7 +1393,15 @@ public void testDBClientSpanWithRemoteResourceAttributes() { GENERATOR.generateMetricAttributeMapFromSpan(spanDataMock, resource).get(DEPENDENCY_METRIC); assertThat(actualAttributes.get(AWS_REMOTE_RESOURCE_TYPE)).isNull(); assertThat(actualAttributes.get(AWS_REMOTE_RESOURCE_IDENTIFIER)).isNull(); + + mockAttribute(DB_NAMESPACE, "db_namespace"); + actualAttributes = + GENERATOR.generateMetricAttributeMapFromSpan(spanDataMock, resource).get(DEPENDENCY_METRIC); + assertThat(actualAttributes.get(AWS_REMOTE_RESOURCE_TYPE)).isNull(); + assertThat(actualAttributes.get(AWS_REMOTE_RESOURCE_IDENTIFIER)).isNull(); + mockAttribute(DB_NAME, null); + mockAttribute(DB_NAMESPACE, null); // Validate behaviour of DB_NAME and DB_CONNECTION_STRING exist, then remove it. mockAttribute(DB_NAME, "db_name"); @@ -1276,7 +1410,14 @@ public void testDBClientSpanWithRemoteResourceAttributes() { "mysql://test-apm.cluster-cnrw3s3ddo7n.us-east-1.rds.amazonaws.com:3306/petclinic"); validateRemoteResourceAttributes( "DB::Connection", "db_name|test-apm.cluster-cnrw3s3ddo7n.us-east-1.rds.amazonaws.com|3306"); + + mockAttribute(DB_NAMESPACE, "db_namespace"); + validateRemoteResourceAttributes( + "DB::Connection", + "db_namespace|test-apm.cluster-cnrw3s3ddo7n.us-east-1.rds.amazonaws.com|3306"); + mockAttribute(DB_NAME, null); + mockAttribute(DB_NAMESPACE, null); mockAttribute(DB_CONNECTION_STRING, null); // Validate behaviour of DB_CONNECTION_STRING exist, then remove it. @@ -1300,7 +1441,15 @@ public void testDBClientSpanWithRemoteResourceAttributes() { GENERATOR.generateMetricAttributeMapFromSpan(spanDataMock, resource).get(DEPENDENCY_METRIC); assertThat(actualAttributes.get(AWS_REMOTE_RESOURCE_TYPE)).isNull(); assertThat(actualAttributes.get(AWS_REMOTE_RESOURCE_IDENTIFIER)).isNull(); + + mockAttribute(DB_NAMESPACE, "db_namespace"); + actualAttributes = + GENERATOR.generateMetricAttributeMapFromSpan(spanDataMock, resource).get(DEPENDENCY_METRIC); + assertThat(actualAttributes.get(AWS_REMOTE_RESOURCE_TYPE)).isNull(); + assertThat(actualAttributes.get(AWS_REMOTE_RESOURCE_IDENTIFIER)).isNull(); + mockAttribute(DB_NAME, null); + mockAttribute(DB_NAMESPACE, null); mockAttribute(DB_CONNECTION_STRING, null); mockAttribute(DB_SYSTEM, null); @@ -1584,6 +1733,36 @@ public void testDbUserPresentAndIsDbSpanFalse() { assertThat(actualAttributes.get(AWS_REMOTE_DB_USER)).isNull(); } + @Test + public void testGetRemoteOperationWithFallback_NewKeyPresent() { + mockAttribute(MESSAGING_OPERATION_TYPE, "send"); + mockAttribute(MESSAGING_OPERATION, "publish"); + String result = + AwsMetricAttributeGenerator.getRemoteOperationWithFallback( + spanDataMock, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION); + + assertThat(result).isEqualTo("send"); + } + + @Test + public void testGetRemoteOperationWithFallback_DeprecatedKeyPresent() { + mockAttribute(MESSAGING_OPERATION, "publish"); + String result = + AwsMetricAttributeGenerator.getRemoteOperationWithFallback( + spanDataMock, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION); + + assertThat(result).isEqualTo("publish"); + } + + @Test + public void testGetRemoteOperationWithFallback_BothKeysAbsent() { + String result = + AwsMetricAttributeGenerator.getRemoteOperationWithFallback( + spanDataMock, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION); + + assertThat(result).isEqualTo(UNKNOWN_REMOTE_OPERATION); + } + @Test public void testNormalizeRemoteServiceName_NoNormalization() { String serviceName = "non aws service"; @@ -1731,4 +1910,34 @@ public 
void testBothMetricsWhenLocalRootConsumerProcess() { assertThat(attributeMap.get(SERVICE_METRIC)).isEqualTo(serviceAttributes); assertThat(attributeMap.get(DEPENDENCY_METRIC)).isEqualTo(dependencyAttributes); } + + @Test + public void testGetRemoteServiceWithFallback_PrimaryKeyPresent() { + mockAttribute(DB_SYSTEM_NAME, "mysql"); + mockAttribute(DB_SYSTEM, "postgresql"); + String result = + AwsMetricAttributeGenerator.getRemoteServiceWithFallback( + spanDataMock, DB_SYSTEM_NAME, DB_SYSTEM); + + assertThat(result).isEqualTo("mysql"); + } + + @Test + public void testGetRemoteServiceWithFallback_FallbackKeyPresent() { + mockAttribute(DB_SYSTEM, "postgresql"); + String result = + AwsMetricAttributeGenerator.getRemoteServiceWithFallback( + spanDataMock, DB_SYSTEM_NAME, DB_SYSTEM); + + assertThat(result).isEqualTo("postgresql"); + } + + @Test + public void testGetRemoteServiceWithFallback_BothKeysAbsent() { + String result = + AwsMetricAttributeGenerator.getRemoteServiceWithFallback( + spanDataMock, DB_SYSTEM_NAME, DB_SYSTEM); + + assertThat(result).isEqualTo(UNKNOWN_REMOTE_SERVICE); + } } diff --git a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributesSpanExporterTest.java b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributesSpanExporterTest.java index 7f7b340fef..f2c0a9d01b 100644 --- a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributesSpanExporterTest.java +++ b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributesSpanExporterTest.java @@ -15,8 +15,8 @@ package software.amazon.opentelemetry.javaagent.providers; -import static io.opentelemetry.semconv.SemanticAttributes.MESSAGING_OPERATION; -import static io.opentelemetry.semconv.SemanticAttributes.MessagingOperationValues.PROCESS; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MessagingOperationTypeIncubatingValues.PROCESS; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; diff --git a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessorTest.java b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessorTest.java index 8d13499e3a..ad436651c4 100644 --- a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessorTest.java +++ b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessorTest.java @@ -15,7 +15,7 @@ package software.amazon.opentelemetry.javaagent.providers; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_RESPONSE_STATUS_CODE; +import static io.opentelemetry.semconv.HttpAttributes.HTTP_RESPONSE_STATUS_CODE; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; diff --git a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtilTest.java b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtilTest.java index ea576a7303..b7b2286c2f 100644 --- 
a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtilTest.java +++ b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtilTest.java @@ -15,9 +15,13 @@ package software.amazon.opentelemetry.javaagent.providers; -import static io.opentelemetry.semconv.SemanticAttributes.*; -import static io.opentelemetry.semconv.SemanticAttributes.MessagingOperationValues.PROCESS; -import static io.opentelemetry.semconv.SemanticAttributes.MessagingOperationValues.RECEIVE; +import static io.opentelemetry.semconv.HttpAttributes.HTTP_REQUEST_METHOD; +import static io.opentelemetry.semconv.UrlAttributes.URL_PATH; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION_TYPE; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MessagingOperationTypeIncubatingValues.PROCESS; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MessagingOperationTypeIncubatingValues.RECEIVE; +import static io.opentelemetry.semconv.incubating.RpcIncubatingAttributes.RPC_SYSTEM; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -365,6 +369,14 @@ public void testIsConsumerProcessSpanFalse() { assertThat(AwsSpanProcessingUtil.isConsumerProcessSpan(spanDataMock)).isFalse(); } + @Test + public void testIsConsumerProcessSpanFalse_with_MESSAGING_OPERATION_TYPE() { + when(attributesMock.get(MESSAGING_OPERATION_TYPE)).thenReturn(RECEIVE); + when(attributesMock.get(MESSAGING_OPERATION)).thenReturn(PROCESS); + when(spanDataMock.getKind()).thenReturn(SpanKind.CONSUMER); + assertThat(AwsSpanProcessingUtil.isConsumerProcessSpan(spanDataMock)).isFalse(); + } + @Test public void testIsConsumerProcessSpanTrue() { when(attributesMock.get(MESSAGING_OPERATION)).thenReturn(PROCESS); @@ -372,6 +384,14 @@ public void testIsConsumerProcessSpanTrue() { assertThat(AwsSpanProcessingUtil.isConsumerProcessSpan(spanDataMock)).isTrue(); } + @Test + public void testIsConsumerProcessSpanTrue_with_MESSAGING_OPERATION_TYPE() { + when(attributesMock.get(MESSAGING_OPERATION_TYPE)).thenReturn(PROCESS); + when(attributesMock.get(MESSAGING_OPERATION)).thenReturn(RECEIVE); + when(spanDataMock.getKind()).thenReturn(SpanKind.CONSUMER); + assertThat(AwsSpanProcessingUtil.isConsumerProcessSpan(spanDataMock)).isTrue(); + } + // check that AWS SDK v1 SQS ReceiveMessage consumer spans metrics are suppressed @Test public void testNoMetricAttributesForSqsConsumerSpanAwsSdkV1() { @@ -436,6 +456,26 @@ public void testNoMetricAttributesForAwsSdkSqsConsumerProcessSpan() { .isTrue(); } + @Test + public void + testNoMetricAttributesForAwsSdkSqsConsumerProcessSpan_with_MESSAGING_OPERATION_TYPE() { + InstrumentationScopeInfo instrumentationScopeInfo = mock(InstrumentationScopeInfo.class); + when(instrumentationScopeInfo.getName()).thenReturn("io.opentelemetry.aws-sdk-2.2"); + when(spanDataMock.getInstrumentationScopeInfo()).thenReturn(instrumentationScopeInfo); + when(spanDataMock.getKind()).thenReturn(SpanKind.CONSUMER); + when(spanDataMock.getName()).thenReturn("Sqs.ReceiveMessage"); + when(attributesMock.get(MESSAGING_OPERATION_TYPE)).thenReturn(PROCESS); + + assertThat(AwsSpanProcessingUtil.shouldGenerateServiceMetricAttributes(spanDataMock)).isFalse(); + 
assertThat(AwsSpanProcessingUtil.shouldGenerateDependencyMetricAttributes(spanDataMock)) + .isFalse(); + + when(attributesMock.get(MESSAGING_OPERATION_TYPE)).thenReturn(RECEIVE); + assertThat(AwsSpanProcessingUtil.shouldGenerateServiceMetricAttributes(spanDataMock)).isTrue(); + assertThat(AwsSpanProcessingUtil.shouldGenerateDependencyMetricAttributes(spanDataMock)) + .isTrue(); + } + @Test public void testSqlDialectKeywordsOrder() { List keywords = getDialectKeywords(); @@ -455,6 +495,59 @@ public void testSqlDialectKeywordsMaxLength() { } } + @Test + public void testIsKeyPresentWithFallback_NewKeyPresent() { + when(attributesMock.get(MESSAGING_OPERATION_TYPE)).thenReturn("publish"); + assertThat( + AwsSpanProcessingUtil.isKeyPresentWithFallback( + spanDataMock, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION)) + .isTrue(); + } + + @Test + public void testIsKeyPresentWithFallback_DeprecatedKeyPresent() { + when(attributesMock.get(MESSAGING_OPERATION)).thenReturn("publish"); + assertThat( + AwsSpanProcessingUtil.isKeyPresentWithFallback( + spanDataMock, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION)) + .isTrue(); + } + + @Test + public void testIsKeyPresentWithFallback_BothKeysAbsent() { + assertThat( + AwsSpanProcessingUtil.isKeyPresentWithFallback( + spanDataMock, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION)) + .isFalse(); + } + + @Test + public void testGetKeyValueWithFallback_NewKeyPresent() { + when(attributesMock.get(MESSAGING_OPERATION_TYPE)).thenReturn("send"); + when(attributesMock.get(MESSAGING_OPERATION)).thenReturn("publish"); + assertThat( + AwsSpanProcessingUtil.getKeyValueWithFallback( + spanDataMock, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION)) + .isEqualTo("send"); + } + + @Test + public void testGetKeyValueWithFallback_DeprecatedKeyPresent() { + when(attributesMock.get(MESSAGING_OPERATION)).thenReturn("publish"); + assertThat( + AwsSpanProcessingUtil.getKeyValueWithFallback( + spanDataMock, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION)) + .isEqualTo("publish"); + } + + @Test + public void testGetKeyValueWithFallback_BothKeysAbsent() { + assertThat( + AwsSpanProcessingUtil.getKeyValueWithFallback( + spanDataMock, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION)) + .isNull(); + } + @Test public void testIsLambdaServerSpan_withLambdaScope() { ReadableSpan span = mock(ReadableSpan.class); diff --git a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/UdpExporterTest.java b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/UdpExporterTest.java index 2a1cea7106..b070d53bba 100644 --- a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/UdpExporterTest.java +++ b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/UdpExporterTest.java @@ -17,6 +17,7 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.*; import io.opentelemetry.api.common.AttributeKey; @@ -36,8 +37,10 @@ public class UdpExporterTest { public void testUdpExporterWithDefaults() { OtlpUdpSpanExporter exporter = new OtlpUdpSpanExporterBuilder().build(); UdpSender sender = exporter.getSender(); - assertThat(sender.getEndpoint().getHostName()) - .isEqualTo("localhost"); // getHostName implicitly converts 127.0.0.1 to localhost + String senderEndpointHostName = sender.getEndpoint().getHostName(); + // getHostName may or may not 
convert 127.0.0.1 to localhost + assertTrue( + senderEndpointHostName.equals("localhost") || senderEndpointHostName.equals("127.0.0.1")); assertThat(sender.getEndpoint().getPort()).isEqualTo(2000); assertThat(exporter.getPayloadPrefix()).endsWith("T1S"); } diff --git a/build.gradle.kts b/build.gradle.kts index 843124bd80..6fdbd31a30 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -37,6 +37,8 @@ nebulaRelease { addReleaseBranchPattern("""v\d+\.\d+\.x""") } +apply(from = "version.gradle.kts") + nexusPublishing { repositories { sonatype { @@ -71,7 +73,7 @@ allprojects { ktlint("1.4.0").editorConfigOverride(mapOf("indent_size" to "2", "continuation_indent_size" to "2")) // Doesn't support pluginManagement block - targetExclude("settings.gradle.kts") + targetExclude("settings.gradle.kts", "version.gradle.kts") if (!project.path.startsWith(":sample-apps:")) { licenseHeaderFile("${rootProject.projectDir}/config/license/header.java", "plugins|include|import") diff --git a/dependencyManagement/build.gradle.kts b/dependencyManagement/build.gradle.kts index 19a1b3b29c..cf5f3c2f26 100644 --- a/dependencyManagement/build.gradle.kts +++ b/dependencyManagement/build.gradle.kts @@ -27,8 +27,8 @@ data class DependencySet(val group: String, val version: String, val modules: Li val testSnapshots = rootProject.findProperty("testUpstreamSnapshots") == "true" // This is the version of the upstream instrumentation BOM -val otelVersion = "2.11.0-adot1" -val otelSnapshotVersion = "2.12.0" +val otelVersion = "2.18.1-adot1" +val otelSnapshotVersion = "2.19.0" val otelAlphaVersion = if (!testSnapshots) "$otelVersion-alpha" else "$otelSnapshotVersion-alpha-SNAPSHOT" val otelJavaAgentVersion = if (!testSnapshots) otelVersion else "$otelSnapshotVersion-SNAPSHOT" // All versions below are only used in testing and do not affect the released artifact. @@ -77,8 +77,8 @@ val dependencyLists = listOf( "commons-logging:commons-logging:1.2", "com.sparkjava:spark-core:2.9.4", "com.squareup.okhttp3:okhttp:4.12.0", - "io.opentelemetry.contrib:opentelemetry-aws-xray:1.39.0-adot1", - "io.opentelemetry.contrib:opentelemetry-aws-resources:1.39.0-alpha", + "io.opentelemetry.contrib:opentelemetry-aws-xray:1.48.0-adot1", + "io.opentelemetry.contrib:opentelemetry-aws-resources:1.48.0-alpha", "io.opentelemetry.proto:opentelemetry-proto:1.0.0-alpha", "io.opentelemetry.javaagent:opentelemetry-javaagent:$otelJavaAgentVersion", "io.opentelemetry:opentelemetry-extension-aws:1.20.1", diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index e6441136f3..a4b76b9530 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 9355b41557..d4081da476 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.10-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.3-bin.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/gradlew b/gradlew index b740cf1339..f5feea6d6b 100755 --- a/gradlew +++ b/gradlew @@ -15,6 +15,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
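
(Aside on the UdpExporterTest change earlier in this diff: the assertion is relaxed because InetSocketAddress.getHostName() may, but is not required to, perform a reverse lookup when the address was created from a literal IP. A tiny illustration, with no guarantee about which value is returned on a given machine:)

```java
import java.net.InetSocketAddress;

// getHostName() on an address built from the literal "127.0.0.1" may return "localhost"
// (after a reverse lookup) or the literal "127.0.0.1", depending on the resolver.
public final class HostNameResolutionSketch {
  public static void main(String[] args) {
    InetSocketAddress endpoint = new InetSocketAddress("127.0.0.1", 2000);
    System.out.println(endpoint.getHostName()); // "localhost" or "127.0.0.1"
  }
}
```

(continuation of the diff:)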
# +# SPDX-License-Identifier: Apache-2.0 +# ############################################################################## # @@ -84,7 +86,8 @@ done # shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) -APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit +APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s +' "$PWD" ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum diff --git a/gradlew.bat b/gradlew.bat index 25da30dbde..9d21a21834 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -13,6 +13,8 @@ @rem See the License for the specific language governing permissions and @rem limitations under the License. @rem +@rem SPDX-License-Identifier: Apache-2.0 +@rem @if "%DEBUG%"=="" @echo off @rem ########################################################################## diff --git a/instrumentation/aws-sdk/build.gradle.kts b/instrumentation/aws-sdk/build.gradle.kts index 101e966a12..58fb6b48d7 100644 --- a/instrumentation/aws-sdk/build.gradle.kts +++ b/instrumentation/aws-sdk/build.gradle.kts @@ -41,4 +41,6 @@ dependencies { testImplementation("com.amazonaws:aws-java-sdk-sns:1.11.106") testImplementation("com.amazonaws:aws-java-sdk-stepfunctions:1.11.230") testImplementation("com.amazonaws:aws-java-sdk-secretsmanager:1.11.309") + + testRuntimeOnly("org.junit.platform:junit-platform-launcher") } diff --git a/instrumentation/log4j-2.13.2/build.gradle.kts b/instrumentation/log4j-2.13.2/build.gradle.kts index 529f9953c3..027ded24a8 100644 --- a/instrumentation/log4j-2.13.2/build.gradle.kts +++ b/instrumentation/log4j-2.13.2/build.gradle.kts @@ -25,5 +25,5 @@ dependencies { compileOnly("io.opentelemetry.javaagent:opentelemetry-javaagent-extension-api") compileOnly("net.bytebuddy:byte-buddy") - compileOnly("org.apache.logging.log4j:log4j-core:2.22.1") + compileOnly("org.apache.logging.log4j:log4j-core:2.25.1") } diff --git a/lambda-layer/build-layer.sh b/lambda-layer/build-layer.sh index 3190182c3b..473d83317d 100755 --- a/lambda-layer/build-layer.sh +++ b/lambda-layer/build-layer.sh @@ -44,8 +44,6 @@ patch -p1 < "$SOURCEDIR"/../.github/patches/opentelemetry-java-instrumentation.p # This patch is for Lambda related context propagation patch -p1 < "$SOURCEDIR"/patches/opentelemetry-java-instrumentation.patch -patch -p1 < "$SOURCEDIR"/patches/StreamHandlerInstrumentation.patch - ./gradlew publishToMavenLocal popd rm -rf opentelemetry-java-instrumentation @@ -60,7 +58,7 @@ popd ## Build ADOT Lambda Java SDK Layer Code echo "Info: Building ADOT Lambda Java SDK Layer Code" -./gradlew build -PotelVersion=${version} +./gradlew build -PotelVersion=${otel_instrumentation_version} -Pversion=${version} ## Copy ADOT Java Agent downloaded using Gradle task and bundle it with the Lambda handler script @@ -75,4 +73,4 @@ popd ## Cleanup # revert the patch applied since it is only needed while building the layer. 
echo "Info: Cleanup" -git restore ../dependencyManagement/build.gradle.kts \ No newline at end of file +git restore ../dependencyManagement/build.gradle.kts diff --git a/lambda-layer/build.gradle.kts b/lambda-layer/build.gradle.kts index 059294cfec..ade2a438d3 100644 --- a/lambda-layer/build.gradle.kts +++ b/lambda-layer/build.gradle.kts @@ -27,6 +27,7 @@ val javaagentDependency by configurations.creating { extendsFrom() } +val version: String by project val otelVersion: String by project dependencies { @@ -35,7 +36,7 @@ dependencies { // Already included in wrapper so compileOnly compileOnly("io.opentelemetry:opentelemetry-sdk-extension-autoconfigure-spi") compileOnly("io.opentelemetry:opentelemetry-sdk-extension-aws") - javaagentDependency("software.amazon.opentelemetry:aws-opentelemetry-agent:$otelVersion-adot-lambda1") + javaagentDependency("software.amazon.opentelemetry:aws-opentelemetry-agent:$version-adot-lambda1") } tasks.register("download") { diff --git a/lambda-layer/gradle/wrapper/gradle-wrapper.jar b/lambda-layer/gradle/wrapper/gradle-wrapper.jar index 943f0cbfa7..c1962a79e2 100644 Binary files a/lambda-layer/gradle/wrapper/gradle-wrapper.jar and b/lambda-layer/gradle/wrapper/gradle-wrapper.jar differ diff --git a/lambda-layer/gradle/wrapper/gradle-wrapper.properties b/lambda-layer/gradle/wrapper/gradle-wrapper.properties index 37aef8d3f0..aa02b02fc6 100644 --- a/lambda-layer/gradle/wrapper/gradle-wrapper.properties +++ b/lambda-layer/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.1.1-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.3-bin.zip networkTimeout=10000 zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/lambda-layer/gradlew b/lambda-layer/gradlew index 5bba57aa19..aeb74cbb43 100755 --- a/lambda-layer/gradlew +++ b/lambda-layer/gradlew @@ -1,4 +1,3 @@ - #!/bin/sh # @@ -86,9 +85,6 @@ done APP_BASE_NAME=${0##*/} APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' - # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum @@ -198,6 +194,10 @@ if "$cygwin" || "$msys" ; then done fi + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + # Collect all arguments for the java command; # * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of # shell script including quotes and variable substitutions, so put them in diff --git a/lambda-layer/gradlew.bat b/lambda-layer/gradlew.bat index 084f02df4a..93e3f59f13 100644 --- a/lambda-layer/gradlew.bat +++ b/lambda-layer/gradlew.bat @@ -1,4 +1,3 @@ - @rem @rem Copyright 2015 the original author or authors. 
@rem diff --git a/lambda-layer/otel-instrument b/lambda-layer/otel-instrument index 662520d8b4..a718c8ab75 100644 --- a/lambda-layer/otel-instrument +++ b/lambda-layer/otel-instrument @@ -2,7 +2,7 @@ export OTEL_INSTRUMENTATION_AWS_SDK_EXPERIMENTAL_SPAN_ATTRIBUTES=true -export OTEL_PROPAGATORS="${OTEL_PROPAGATORS:-baggage,xray,tracecontext}" +export OTEL_PROPAGATORS="${OTEL_PROPAGATORS:-baggage,tracecontext,xray}" export OTEL_SERVICE_NAME=${OTEL_SERVICE_NAME:-${AWS_LAMBDA_FUNCTION_NAME}} @@ -65,4 +65,4 @@ fi ARGS=("${ARGS[0]}" "${EXTRA_ARGS[@]}" "${ARGS[@]:1}") -exec "${ARGS[@]}" \ No newline at end of file +exec "${ARGS[@]}" diff --git a/lambda-layer/patches/StreamHandlerInstrumentation.patch b/lambda-layer/patches/StreamHandlerInstrumentation.patch deleted file mode 100644 index c4d4751c89..0000000000 --- a/lambda-layer/patches/StreamHandlerInstrumentation.patch +++ /dev/null @@ -1,513 +0,0 @@ -diff --git a/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaInstrumentationModule.java b/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaInstrumentationModule.java -index 35d6b70ed6..b6a305178e 100644 ---- a/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaInstrumentationModule.java -+++ b/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaInstrumentationModule.java -@@ -6,17 +6,18 @@ - package io.opentelemetry.javaagent.instrumentation.awslambdacore.v1_0; - - import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; --import static java.util.Collections.singletonList; - import static net.bytebuddy.matcher.ElementMatchers.not; - - import com.google.auto.service.AutoService; - import io.opentelemetry.javaagent.extension.instrumentation.InstrumentationModule; - import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; -+import java.util.Arrays; - import java.util.List; - import net.bytebuddy.matcher.ElementMatcher; - - @AutoService(InstrumentationModule.class) - public class AwsLambdaInstrumentationModule extends InstrumentationModule { -+ - public AwsLambdaInstrumentationModule() { - super("aws-lambda-core", "aws-lambda-core-1.0", "aws-lambda"); - } -@@ -34,6 +35,8 @@ public class AwsLambdaInstrumentationModule extends InstrumentationModule { - - @Override - public List typeInstrumentations() { -- return singletonList(new AwsLambdaRequestHandlerInstrumentation()); -+ return Arrays.asList( -+ new AwsLambdaRequestHandlerInstrumentation(), -+ new AwsLambdaRequestStreamHandlerInstrumentation()); - } - } -diff --git a/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestStreamHandlerInstrumentation.java b/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestStreamHandlerInstrumentation.java -new file mode 100644 -index 0000000000..1c4ef1ac07 ---- /dev/null -+++ b/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestStreamHandlerInstrumentation.java -@@ -0,0 +1,98 @@ -+/* -+ * Copyright The 
OpenTelemetry Authors -+ * SPDX-License-Identifier: Apache-2.0 -+ */ -+ -+package io.opentelemetry.javaagent.instrumentation.awslambdacore.v1_0; -+ -+import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; -+import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.implementsInterface; -+import static io.opentelemetry.javaagent.instrumentation.awslambdacore.v1_0.AwsLambdaInstrumentationHelper.functionInstrumenter; -+import static net.bytebuddy.matcher.ElementMatchers.isMethod; -+import static net.bytebuddy.matcher.ElementMatchers.isPublic; -+import static net.bytebuddy.matcher.ElementMatchers.nameStartsWith; -+import static net.bytebuddy.matcher.ElementMatchers.named; -+import static net.bytebuddy.matcher.ElementMatchers.not; -+import static net.bytebuddy.matcher.ElementMatchers.takesArgument; -+ -+import com.amazonaws.services.lambda.runtime.Context; -+import io.opentelemetry.context.Scope; -+import io.opentelemetry.instrumentation.awslambdacore.v1_0.AwsLambdaRequest; -+import io.opentelemetry.javaagent.bootstrap.OpenTelemetrySdkAccess; -+import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; -+import io.opentelemetry.javaagent.extension.instrumentation.TypeTransformer; -+import java.io.InputStream; -+import java.util.Collections; -+import java.util.concurrent.TimeUnit; -+import net.bytebuddy.asm.Advice; -+import net.bytebuddy.description.type.TypeDescription; -+import net.bytebuddy.implementation.bytecode.assign.Assigner.Typing; -+import net.bytebuddy.matcher.ElementMatcher; -+ -+public class AwsLambdaRequestStreamHandlerInstrumentation implements TypeInstrumentation { -+ -+ @Override -+ public ElementMatcher classLoaderOptimization() { -+ return hasClassesNamed("com.amazonaws.services.lambda.runtime.RequestStreamHandler"); -+ } -+ -+ @Override -+ public ElementMatcher typeMatcher() { -+ return implementsInterface(named("com.amazonaws.services.lambda.runtime.RequestStreamHandler")) -+ .and(not(nameStartsWith("com.amazonaws.services.lambda.runtime.api.client"))) -+ // In Java 8 and Java 11 runtimes, -+ // AWS Lambda runtime is packaged under `lambdainternal` package. -+ // But it is `com.amazonaws.services.lambda.runtime.api.client` -+ // for new runtime likes Java 17 and Java 21. 
-+ .and(not(nameStartsWith("lambdainternal"))); -+ } -+ -+ @Override -+ public void transform(TypeTransformer transformer) { -+ transformer.applyAdviceToMethod( -+ isMethod() -+ .and(isPublic()) -+ .and(named("handleRequest")) -+ .and(takesArgument(2, named("com.amazonaws.services.lambda.runtime.Context"))), -+ AwsLambdaRequestStreamHandlerInstrumentation.class.getName() + "$HandleRequestAdvice"); -+ } -+ -+ @SuppressWarnings("unused") -+ public static class HandleRequestAdvice { -+ -+ @Advice.OnMethodEnter(suppress = Throwable.class) -+ public static void onEnter( -+ @Advice.Argument(0) InputStream input, -+ @Advice.Argument(2) Context context, -+ @Advice.Local("otelInput") AwsLambdaRequest otelInput, -+ @Advice.Local("otelContext") io.opentelemetry.context.Context otelContext, -+ @Advice.Local("otelScope") Scope otelScope) { -+ -+ otelInput = AwsLambdaRequest.create(context, input, Collections.emptyMap()); -+ io.opentelemetry.context.Context parentContext = functionInstrumenter().extract(otelInput); -+ -+ if (!functionInstrumenter().shouldStart(parentContext, otelInput)) { -+ return; -+ } -+ -+ otelContext = functionInstrumenter().start(parentContext, otelInput); -+ otelScope = otelContext.makeCurrent(); -+ } -+ -+ @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class) -+ public static void stopSpan( -+ @Advice.Argument(value = 0, typing = Typing.DYNAMIC) Object arg, -+ @Advice.Thrown Throwable throwable, -+ @Advice.Local("otelInput") AwsLambdaRequest input, -+ @Advice.Local("otelContext") io.opentelemetry.context.Context functionContext, -+ @Advice.Local("otelScope") Scope functionScope) { -+ -+ if (functionScope != null) { -+ functionScope.close(); -+ functionInstrumenter().end(functionContext, input, null, throwable); -+ } -+ -+ OpenTelemetrySdkAccess.forceFlush((long)1, TimeUnit.SECONDS); -+ } -+ } -+} -diff --git a/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaStreamHandlerTest.java b/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaStreamHandlerTest.java -new file mode 100644 -index 0000000000..7bed968d77 ---- /dev/null -+++ b/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaStreamHandlerTest.java -@@ -0,0 +1,113 @@ -+/* -+ * Copyright The OpenTelemetry Authors -+ * SPDX-License-Identifier: Apache-2.0 -+ */ -+ -+package io.opentelemetry.javaagent.instrumentation.awslambdacore.v1_0; -+ -+import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo; -+import static org.assertj.core.api.Assertions.assertThat; -+import static org.assertj.core.api.Assertions.catchThrowable; -+import static org.mockito.Mockito.when; -+ -+import com.amazonaws.services.lambda.runtime.Context; -+import com.amazonaws.services.lambda.runtime.RequestStreamHandler; -+import io.opentelemetry.api.trace.SpanKind; -+import io.opentelemetry.instrumentation.testing.junit.AgentInstrumentationExtension; -+import io.opentelemetry.instrumentation.testing.junit.InstrumentationExtension; -+import io.opentelemetry.sdk.trace.data.StatusData; -+import io.opentelemetry.semconv.SemanticAttributes; -+import java.io.BufferedReader; -+import java.io.BufferedWriter; -+import java.io.ByteArrayInputStream; -+import java.io.ByteArrayOutputStream; -+import java.io.IOException; -+import java.io.InputStream; 
-+import java.io.InputStreamReader; -+import java.io.OutputStream; -+import java.io.OutputStreamWriter; -+import java.nio.charset.StandardCharsets; -+import org.junit.jupiter.api.AfterEach; -+import org.junit.jupiter.api.BeforeEach; -+import org.junit.jupiter.api.Test; -+import org.junit.jupiter.api.extension.ExtendWith; -+import org.junit.jupiter.api.extension.RegisterExtension; -+import org.mockito.Mock; -+import org.mockito.junit.jupiter.MockitoExtension; -+ -+@ExtendWith(MockitoExtension.class) -+public class AwsLambdaStreamHandlerTest { -+ -+ @RegisterExtension -+ public static final InstrumentationExtension testing = AgentInstrumentationExtension.create(); -+ -+ @Mock private Context context; -+ -+ @BeforeEach -+ void setUp() { -+ when(context.getFunctionName()).thenReturn("my_function"); -+ when(context.getAwsRequestId()).thenReturn("1-22-333"); -+ } -+ -+ @AfterEach -+ void tearDown() { -+ assertThat(testing.forceFlushCalled()).isTrue(); -+ } -+ -+ @Test -+ void handlerTraced() throws Exception { -+ InputStream input = new ByteArrayInputStream("hello\n".getBytes(StandardCharsets.UTF_8)); -+ OutputStream output = new ByteArrayOutputStream(); -+ RequestStreamHandlerTestImpl handler = new RequestStreamHandlerTestImpl(); -+ handler.handleRequest(input, output, context); -+ -+ testing.waitAndAssertTraces( -+ trace -> -+ trace.hasSpansSatisfyingExactly( -+ span -> -+ span.hasName("my_function") -+ .hasKind(SpanKind.SERVER) -+ .hasAttributesSatisfyingExactly( -+ equalTo(SemanticAttributes.FAAS_INVOCATION_ID, "1-22-333")))); -+ } -+ -+ @Test -+ void handlerTracedWithException() { -+ InputStream input = new ByteArrayInputStream("bye\n".getBytes(StandardCharsets.UTF_8)); -+ OutputStream output = new ByteArrayOutputStream(); -+ RequestStreamHandlerTestImpl handler = new RequestStreamHandlerTestImpl(); -+ -+ Throwable thrown = catchThrowable(() -> handler.handleRequest(input, output, context)); -+ assertThat(thrown).isInstanceOf(IllegalArgumentException.class); -+ -+ testing.waitAndAssertTraces( -+ trace -> -+ trace.hasSpansSatisfyingExactly( -+ span -> -+ span.hasName("my_function") -+ .hasKind(SpanKind.SERVER) -+ .hasStatus(StatusData.error()) -+ .hasException(thrown) -+ .hasAttributesSatisfyingExactly( -+ equalTo(SemanticAttributes.FAAS_INVOCATION_ID, "1-22-333")))); -+ } -+ -+ static final class RequestStreamHandlerTestImpl implements RequestStreamHandler { -+ @Override -+ public void handleRequest(InputStream input, OutputStream output, Context context) -+ throws IOException { -+ BufferedReader reader = -+ new BufferedReader(new InputStreamReader(input, StandardCharsets.UTF_8)); -+ BufferedWriter writer = -+ new BufferedWriter(new OutputStreamWriter(output, StandardCharsets.UTF_8)); -+ String line = reader.readLine(); -+ if (line.equals("hello")) { -+ writer.write("world"); -+ writer.flush(); -+ writer.close(); -+ } else { -+ throw new IllegalArgumentException("bad argument"); -+ } -+ } -+ } -+} -diff --git a/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaInstrumentationModule.java b/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaInstrumentationModule.java -index 9e0e372241..2dd6051c23 100644 ---- a/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaInstrumentationModule.java -+++ 
b/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaInstrumentationModule.java -@@ -6,11 +6,11 @@ - package io.opentelemetry.javaagent.instrumentation.awslambdaevents.v2_2; - - import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; --import static java.util.Collections.singletonList; - - import com.google.auto.service.AutoService; - import io.opentelemetry.javaagent.extension.instrumentation.InstrumentationModule; - import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; -+import java.util.Arrays; - import java.util.List; - import net.bytebuddy.matcher.ElementMatcher; - -@@ -32,6 +32,8 @@ public class AwsLambdaInstrumentationModule extends InstrumentationModule { - - @Override - public List typeInstrumentations() { -- return singletonList(new AwsLambdaRequestHandlerInstrumentation()); -+ return Arrays.asList( -+ new AwsLambdaRequestHandlerInstrumentation(), -+ new AwsLambdaRequestStreamHandlerInstrumentation()); - } - } -diff --git a/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestStreamHandlerInstrumentation.java b/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestStreamHandlerInstrumentation.java -new file mode 100644 -index 0000000000..f21a4a5526 ---- /dev/null -+++ b/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestStreamHandlerInstrumentation.java -@@ -0,0 +1,104 @@ -+/* -+ * Copyright The OpenTelemetry Authors -+ * SPDX-License-Identifier: Apache-2.0 -+ */ -+ -+package io.opentelemetry.javaagent.instrumentation.awslambdaevents.v2_2; -+ -+import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; -+import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.implementsInterface; -+import static net.bytebuddy.matcher.ElementMatchers.isMethod; -+import static net.bytebuddy.matcher.ElementMatchers.isPublic; -+import static net.bytebuddy.matcher.ElementMatchers.named; -+import static net.bytebuddy.matcher.ElementMatchers.takesArgument; -+ -+import com.amazonaws.services.lambda.runtime.Context; -+import com.amazonaws.services.lambda.runtime.events.SQSEvent; -+import io.opentelemetry.context.Scope; -+import io.opentelemetry.instrumentation.awslambdacore.v1_0.AwsLambdaRequest; -+import io.opentelemetry.javaagent.bootstrap.OpenTelemetrySdkAccess; -+import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; -+import io.opentelemetry.javaagent.extension.instrumentation.TypeTransformer; -+import java.io.InputStream; -+import java.util.Collections; -+import java.util.concurrent.TimeUnit; -+import net.bytebuddy.asm.Advice; -+import net.bytebuddy.description.type.TypeDescription; -+import net.bytebuddy.implementation.bytecode.assign.Assigner.Typing; -+import net.bytebuddy.matcher.ElementMatcher; -+ -+public class AwsLambdaRequestStreamHandlerInstrumentation implements TypeInstrumentation { -+ -+ @Override -+ public ElementMatcher classLoaderOptimization() { -+ return hasClassesNamed("com.amazonaws.services.lambda.runtime.RequestStreamHandler"); -+ } -+ -+ @Override -+ public ElementMatcher typeMatcher() { -+ return 
implementsInterface(named("com.amazonaws.services.lambda.runtime.RequestStreamHandler")); -+ } -+ -+ @Override -+ public void transform(TypeTransformer transformer) { -+ transformer.applyAdviceToMethod( -+ isMethod() -+ .and(isPublic()) -+ .and(named("handleRequest")) -+ .and(takesArgument(2, named("com.amazonaws.services.lambda.runtime.Context"))), -+ AwsLambdaRequestStreamHandlerInstrumentation.class.getName() + "$HandleRequestAdvice"); -+ } -+ -+ @SuppressWarnings("unused") -+ public static class HandleRequestAdvice { -+ -+ @Advice.OnMethodEnter(suppress = Throwable.class) -+ public static void onEnter( -+ @Advice.Argument(0) InputStream input, -+ @Advice.Argument(2) Context context, -+ @Advice.Local("otelInput") AwsLambdaRequest otelInput, -+ @Advice.Local("otelFunctionContext") io.opentelemetry.context.Context functionContext, -+ @Advice.Local("otelFunctionScope") Scope functionScope, -+ @Advice.Local("otelMessageContext") io.opentelemetry.context.Context messageContext, -+ @Advice.Local("otelMessageScope") Scope messageScope) { -+ otelInput = AwsLambdaRequest.create(context, input, Collections.emptyMap()); -+ io.opentelemetry.context.Context parentContext = -+ AwsLambdaInstrumentationHelper.functionInstrumenter().extract(otelInput); -+ -+ if (!AwsLambdaInstrumentationHelper.functionInstrumenter() -+ .shouldStart(parentContext, otelInput)) { -+ return; -+ } -+ -+ functionContext = -+ AwsLambdaInstrumentationHelper.functionInstrumenter().start(parentContext, otelInput); -+ -+ functionScope = functionContext.makeCurrent(); -+ } -+ -+ @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class) -+ public static void stopSpan( -+ @Advice.Argument(value = 0, typing = Typing.DYNAMIC) Object arg, -+ @Advice.Thrown Throwable throwable, -+ @Advice.Local("otelInput") AwsLambdaRequest input, -+ @Advice.Local("otelFunctionContext") io.opentelemetry.context.Context functionContext, -+ @Advice.Local("otelFunctionScope") Scope functionScope, -+ @Advice.Local("otelMessageContext") io.opentelemetry.context.Context messageContext, -+ @Advice.Local("otelMessageScope") Scope messageScope) { -+ -+ if (messageScope != null) { -+ messageScope.close(); -+ AwsLambdaInstrumentationHelper.messageInstrumenter() -+ .end(messageContext, (SQSEvent) arg, null, throwable); -+ } -+ -+ if (functionScope != null) { -+ functionScope.close(); -+ AwsLambdaInstrumentationHelper.functionInstrumenter() -+ .end(functionContext, input, null, throwable); -+ } -+ -+ OpenTelemetrySdkAccess.forceFlush((long)1, TimeUnit.SECONDS); -+ } -+ } -+} -diff --git a/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaStreamHandlerTest.java b/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaStreamHandlerTest.java -new file mode 100644 -index 0000000000..e30690418d ---- /dev/null -+++ b/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaStreamHandlerTest.java -@@ -0,0 +1,113 @@ -+/* -+ * Copyright The OpenTelemetry Authors -+ * SPDX-License-Identifier: Apache-2.0 -+ */ -+ -+package io.opentelemetry.javaagent.instrumentation.awslambdaevents.v2_2; -+ -+import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo; -+import static org.assertj.core.api.Assertions.assertThat; -+import static 
org.assertj.core.api.Assertions.catchThrowable; -+import static org.mockito.Mockito.when; -+ -+import com.amazonaws.services.lambda.runtime.Context; -+import com.amazonaws.services.lambda.runtime.RequestStreamHandler; -+import io.opentelemetry.api.trace.SpanKind; -+import io.opentelemetry.instrumentation.testing.junit.AgentInstrumentationExtension; -+import io.opentelemetry.instrumentation.testing.junit.InstrumentationExtension; -+import io.opentelemetry.sdk.trace.data.StatusData; -+import io.opentelemetry.semconv.SemanticAttributes; -+import java.io.BufferedReader; -+import java.io.BufferedWriter; -+import java.io.ByteArrayInputStream; -+import java.io.ByteArrayOutputStream; -+import java.io.IOException; -+import java.io.InputStream; -+import java.io.InputStreamReader; -+import java.io.OutputStream; -+import java.io.OutputStreamWriter; -+import java.nio.charset.StandardCharsets; -+import org.junit.jupiter.api.AfterEach; -+import org.junit.jupiter.api.BeforeEach; -+import org.junit.jupiter.api.Test; -+import org.junit.jupiter.api.extension.ExtendWith; -+import org.junit.jupiter.api.extension.RegisterExtension; -+import org.mockito.Mock; -+import org.mockito.junit.jupiter.MockitoExtension; -+ -+@ExtendWith(MockitoExtension.class) -+public class AwsLambdaStreamHandlerTest { -+ -+ @RegisterExtension -+ public static final InstrumentationExtension testing = AgentInstrumentationExtension.create(); -+ -+ @Mock private Context context; -+ -+ @BeforeEach -+ void setUp() { -+ when(context.getFunctionName()).thenReturn("my_function"); -+ when(context.getAwsRequestId()).thenReturn("1-22-333"); -+ } -+ -+ @AfterEach -+ void tearDown() { -+ assertThat(testing.forceFlushCalled()).isTrue(); -+ } -+ -+ @Test -+ void handlerTraced() throws Exception { -+ InputStream input = new ByteArrayInputStream("hello\n".getBytes(StandardCharsets.UTF_8)); -+ OutputStream output = new ByteArrayOutputStream(); -+ RequestStreamHandlerTestImpl handler = new RequestStreamHandlerTestImpl(); -+ handler.handleRequest(input, output, context); -+ -+ testing.waitAndAssertTraces( -+ trace -> -+ trace.hasSpansSatisfyingExactly( -+ span -> -+ span.hasName("my_function") -+ .hasKind(SpanKind.SERVER) -+ .hasAttributesSatisfyingExactly( -+ equalTo(SemanticAttributes.FAAS_INVOCATION_ID, "1-22-333")))); -+ } -+ -+ @Test -+ void handlerTracedWithException() { -+ InputStream input = new ByteArrayInputStream("bye\n".getBytes(StandardCharsets.UTF_8)); -+ OutputStream output = new ByteArrayOutputStream(); -+ RequestStreamHandlerTestImpl handler = new RequestStreamHandlerTestImpl(); -+ -+ Throwable thrown = catchThrowable(() -> handler.handleRequest(input, output, context)); -+ assertThat(thrown).isInstanceOf(IllegalArgumentException.class); -+ -+ testing.waitAndAssertTraces( -+ trace -> -+ trace.hasSpansSatisfyingExactly( -+ span -> -+ span.hasName("my_function") -+ .hasKind(SpanKind.SERVER) -+ .hasStatus(StatusData.error()) -+ .hasException(thrown) -+ .hasAttributesSatisfyingExactly( -+ equalTo(SemanticAttributes.FAAS_INVOCATION_ID, "1-22-333")))); -+ } -+ -+ static final class RequestStreamHandlerTestImpl implements RequestStreamHandler { -+ @Override -+ public void handleRequest(InputStream input, OutputStream output, Context context) -+ throws IOException { -+ BufferedReader reader = -+ new BufferedReader(new InputStreamReader(input, StandardCharsets.UTF_8)); -+ BufferedWriter writer = -+ new BufferedWriter(new OutputStreamWriter(output, StandardCharsets.UTF_8)); -+ String line = reader.readLine(); -+ if (line.equals("hello")) { -+ 
writer.write("world"); -+ writer.flush(); -+ writer.close(); -+ } else { -+ throw new IllegalArgumentException("bad argument"); -+ } -+ } -+ } -+} diff --git a/lambda-layer/patches/aws-otel-java-instrumentation.patch b/lambda-layer/patches/aws-otel-java-instrumentation.patch index f2e08b6c29..f95c364151 100644 --- a/lambda-layer/patches/aws-otel-java-instrumentation.patch +++ b/lambda-layer/patches/aws-otel-java-instrumentation.patch @@ -1,13 +1,13 @@ diff --git a/dependencyManagement/build.gradle.kts b/dependencyManagement/build.gradle.kts -index 9493189..6090207 100644 +index d186406..91b9386 100644 --- a/dependencyManagement/build.gradle.kts +++ b/dependencyManagement/build.gradle.kts @@ -27,7 +27,7 @@ data class DependencySet(val group: String, val version: String, val modules: Li val testSnapshots = rootProject.findProperty("testUpstreamSnapshots") == "true" - + // This is the version of the upstream instrumentation BOM --val otelVersion = "2.11.0-adot1" -+val otelVersion = "2.11.0-adot-lambda1" - val otelSnapshotVersion = "2.12.0" +-val otelVersion = "2.18.1-adot1" ++val otelVersion = "2.18.1-adot-lambda1" + val otelSnapshotVersion = "2.19.0" val otelAlphaVersion = if (!testSnapshots) "$otelVersion-alpha" else "$otelSnapshotVersion-alpha-SNAPSHOT" val otelJavaAgentVersion = if (!testSnapshots) otelVersion else "$otelSnapshotVersion-SNAPSHOT" diff --git a/lambda-layer/patches/opentelemetry-java-instrumentation.patch b/lambda-layer/patches/opentelemetry-java-instrumentation.patch index cca35f0ed0..220947b7a7 100644 --- a/lambda-layer/patches/opentelemetry-java-instrumentation.patch +++ b/lambda-layer/patches/opentelemetry-java-instrumentation.patch @@ -1,5 +1,83 @@ +diff --git a/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestHandlerInstrumentation.java b/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestHandlerInstrumentation.java +index 93071e04d2..add9f64276 100644 +--- a/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestHandlerInstrumentation.java ++++ b/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestHandlerInstrumentation.java +@@ -68,7 +68,7 @@ public class AwsLambdaRequestHandlerInstrumentation implements TypeInstrumentati + @Advice.Local("otelContext") io.opentelemetry.context.Context otelContext, + @Advice.Local("otelScope") Scope otelScope) { + input = AwsLambdaRequest.create(context, arg, Collections.emptyMap()); +- io.opentelemetry.context.Context parentContext = functionInstrumenter().extract(input); ++ io.opentelemetry.context.Context parentContext = functionInstrumenter().extract(input, context); + + if (!functionInstrumenter().shouldStart(parentContext, input)) { + return; +diff --git a/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestStreamHandlerInstrumentation.java b/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestStreamHandlerInstrumentation.java +index a6b89d253d..e62d30eddb 100644 +--- 
a/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestStreamHandlerInstrumentation.java ++++ b/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestStreamHandlerInstrumentation.java +@@ -69,7 +69,7 @@ public class AwsLambdaRequestStreamHandlerInstrumentation implements TypeInstrum + @Advice.Local("otelScope") Scope otelScope) { + + otelInput = AwsLambdaRequest.create(context, input, Collections.emptyMap()); +- io.opentelemetry.context.Context parentContext = functionInstrumenter().extract(otelInput); ++ io.opentelemetry.context.Context parentContext = functionInstrumenter().extract(otelInput, context); + + if (!functionInstrumenter().shouldStart(parentContext, otelInput)) { + return; +diff --git a/instrumentation/aws-lambda/aws-lambda-core-1.0/library/build.gradle.kts b/instrumentation/aws-lambda/aws-lambda-core-1.0/library/build.gradle.kts +index df605add2f..e16c736990 100644 +--- a/instrumentation/aws-lambda/aws-lambda-core-1.0/library/build.gradle.kts ++++ b/instrumentation/aws-lambda/aws-lambda-core-1.0/library/build.gradle.kts +@@ -5,11 +5,12 @@ plugins { + dependencies { + compileOnly("io.opentelemetry:opentelemetry-sdk") + compileOnly("io.opentelemetry:opentelemetry-sdk-extension-autoconfigure") ++ compileOnly(project(":muzzle")) + + compileOnly("com.google.auto.value:auto-value-annotations") + annotationProcessor("com.google.auto.value:auto-value") + +- library("com.amazonaws:aws-lambda-java-core:1.0.0") ++ library("com.amazonaws:aws-lambda-java-core:1.4.0") + + // We do lightweight parsing of JSON to extract HTTP headers from requests for propagation. + // This will be commonly needed even for users that don't use events, but luckily it's not too big. 
+@@ -26,6 +27,7 @@ dependencies { + + testImplementation(project(":instrumentation:aws-lambda:aws-lambda-core-1.0:testing")) + testImplementation("uk.org.webcompere:system-stubs-jupiter") ++ testImplementation("com.google.guava:guava") + } + + tasks.withType().configureEach { +diff --git a/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/main/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/TracingRequestHandler.java b/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/main/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/TracingRequestHandler.java +index 873040f66e..b38648e8cf 100644 +--- a/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/main/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/TracingRequestHandler.java ++++ b/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/main/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/TracingRequestHandler.java +@@ -66,7 +66,7 @@ public abstract class TracingRequestHandler implements RequestHandler headers = input.getHeaders(); if (input.getAwsContext() != null && input.getAwsContext().getClientContext() != null) { -@@ -59,23 +57,15 @@ public class AwsLambdaFunctionInstrumenter { +@@ -59,23 +58,15 @@ public class AwsLambdaFunctionInstrumenter { } } - + - return openTelemetry - .getPropagators() - .getTextMapPropagator() - .extract(Context.root(), headers, MapGetter.INSTANCE); -+ return ParentContextExtractor.extract(headers, this); ++ return ParentContextExtractor.extract(headers, this, lambdaContext); } - + - private enum MapGetter implements TextMapGetter> { - INSTANCE; - @@ -58,7 +139,7 @@ index 9341bf6f79..2208c3c482 100644 - } + public Context extract(Map headers, TextMapGetter> getter) { + ContextPropagationDebug.debugContextLeakIfEnabled(); - + - @Override - public String get(Map map, String s) { - return map.get(s.toLowerCase(Locale.ROOT)); @@ -71,10 +152,10 @@ index 9341bf6f79..2208c3c482 100644 } diff --git a/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/main/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/internal/ParentContextExtractor.java b/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/main/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/internal/ParentContextExtractor.java new file mode 100644 -index 0000000000..439ed0de07 +index 0000000000..6349d1bb29 --- /dev/null +++ b/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/main/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/internal/ParentContextExtractor.java -@@ -0,0 +1,77 @@ +@@ -0,0 +1,85 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 @@ -84,13 +165,12 @@ index 0000000000..439ed0de07 + +import static io.opentelemetry.instrumentation.awslambdacore.v1_0.internal.MapUtils.lowercaseMap; + -+import io.opentelemetry.api.trace.Span; -+import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.propagation.TextMapGetter; -+import java.util.Collections; ++import io.opentelemetry.javaagent.tooling.muzzle.NoMuzzle; +import java.util.Locale; +import java.util.Map; ++import java.util.logging.Logger; + +/** + * This class is internal and is hence not for public use. 
Its APIs are unstable and can change at @@ -98,44 +178,53 @@ index 0000000000..439ed0de07 + */ +public final class ParentContextExtractor { + ++ private static final Logger logger = Logger.getLogger(ParentContextExtractor.class.getName()); + private static final String AWS_TRACE_HEADER_ENV_KEY = "_X_AMZN_TRACE_ID"; + private static final String AWS_TRACE_HEADER_PROP = "com.amazonaws.xray.traceHeader"; + // lower-case map getter used for extraction + static final String AWS_TRACE_HEADER_PROPAGATOR_KEY = "x-amzn-trace-id"; ++ static boolean getXrayTraceIdMethodExists = true; + -+ static Context extract(Map headers, AwsLambdaFunctionInstrumenter instrumenter) { -+ Context parentContext = null; -+ String parentTraceHeader = getTraceHeader(); ++ static Context extract( ++ Map headers, ++ AwsLambdaFunctionInstrumenter instrumenter, ++ com.amazonaws.services.lambda.runtime.Context lambdaContext) { ++ Map mergedHeaders = lowercaseMap(headers); ++ String parentTraceHeader = getTraceHeader(lambdaContext); + if (parentTraceHeader != null) { -+ parentContext = instrumenter.extract( -+ Collections.singletonMap(AWS_TRACE_HEADER_PROPAGATOR_KEY, parentTraceHeader), -+ MapGetter.INSTANCE); ++ mergedHeaders.put(AWS_TRACE_HEADER_PROPAGATOR_KEY, parentTraceHeader); + } -+ if (!isValidAndSampled(parentContext)) { -+ // try http -+ parentContext = instrumenter.extract(lowercaseMap(headers), MapGetter.INSTANCE); -+ } -+ return parentContext; ++ return instrumenter.extract(mergedHeaders, MapGetter.INSTANCE); + } + -+ private static String getTraceHeader() { ++ @NoMuzzle ++ private static String getTraceHeader( ++ com.amazonaws.services.lambda.runtime.Context lambdaContext) { ++ String traceHeader = null; ++ ++ // Lambda Core dependency that is actually used by Lambda Runtime may be on an older version ++ // that does not have the `getXrayTraceId` method. If `NoSuchMethodError` occurs, we do not ++ // attempt invoking `getXrayTraceId` again. 
++ if (getXrayTraceIdMethodExists) { ++ try { ++ traceHeader = lambdaContext.getXrayTraceId(); ++ } catch (NoSuchMethodError e) { ++ logger.fine("Failed to get X-Ray trace ID from lambdaContext: " + e); ++ getXrayTraceIdMethodExists = false; ++ } ++ } ++ if (traceHeader != null && !traceHeader.isEmpty()) { ++ return traceHeader; ++ } ++ + // Lambda propagates trace header by system property instead of environment variable from java17 -+ String traceHeader = System.getProperty(AWS_TRACE_HEADER_PROP); ++ traceHeader = System.getProperty(AWS_TRACE_HEADER_PROP); + if (traceHeader == null || traceHeader.isEmpty()) { + return System.getenv(AWS_TRACE_HEADER_ENV_KEY); + } + return traceHeader; + } + -+ private static boolean isValidAndSampled(Context context) { -+ if (context == null) { -+ return false; -+ } -+ Span parentSpan = Span.fromContext(context); -+ SpanContext parentSpanContext = parentSpan.getSpanContext(); -+ return (parentSpanContext.isValid() && parentSpanContext.isSampled()); -+ } -+ + private enum MapGetter implements TextMapGetter> { + INSTANCE; + @@ -152,12 +241,25 @@ index 0000000000..439ed0de07 + + private ParentContextExtractor() {} +} +diff --git a/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/test/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/internal/InstrumenterExtractionTest.java b/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/test/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/internal/InstrumenterExtractionTest.java +index cb19d1e568..12ed174bb2 100644 +--- a/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/test/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/internal/InstrumenterExtractionTest.java ++++ b/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/test/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/internal/InstrumenterExtractionTest.java +@@ -37,7 +37,7 @@ class InstrumenterExtractionTest { + + AwsLambdaRequest input = AwsLambdaRequest.create(awsContext, new HashMap<>(), new HashMap<>()); + +- Context extracted = instr.extract(input); ++ Context extracted = instr.extract(input, awsContext); + SpanContext spanContext = Span.fromContext(extracted).getSpanContext(); + assertThat(spanContext.getTraceId()).isEqualTo("4bf92f3577b34da6a3ce929d0e0e4736"); + assertThat(spanContext.getSpanId()).isEqualTo("00f067aa0ba902b7"); diff --git a/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/test/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/internal/ParentContextExtractorTest.java b/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/test/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/internal/ParentContextExtractorTest.java new file mode 100644 -index 0000000000..1fa0b6e536 +index 0000000000..4b0f354769 --- /dev/null +++ b/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/test/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/internal/ParentContextExtractorTest.java -@@ -0,0 +1,135 @@ +@@ -0,0 +1,375 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 @@ -166,13 +268,19 @@ index 0000000000..1fa0b6e536 +package io.opentelemetry.instrumentation.awslambdacore.v1_0.internal; + +import static org.assertj.core.api.Assertions.assertThat; ++import static org.mockito.Mockito.mock; ++import static org.mockito.Mockito.times; ++import static org.mockito.Mockito.verify; ++import static org.mockito.Mockito.when; + ++import com.amazonaws.services.lambda.runtime.Context; +import com.google.common.collect.ImmutableMap; 
+import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanContext; -+import io.opentelemetry.context.Context; +import io.opentelemetry.context.propagation.ContextPropagators; ++import io.opentelemetry.context.propagation.TextMapPropagator; ++import io.opentelemetry.contrib.awsxray.propagator.AwsXrayPropagator; +import io.opentelemetry.extension.trace.propagation.B3Propagator; +import java.util.Map; +import org.junit.jupiter.api.Test; @@ -190,12 +298,33 @@ index 0000000000..1fa0b6e536 +class ParentContextExtractorTest { + + @SystemStub final EnvironmentVariables environmentVariables = new EnvironmentVariables(); ++ @SystemStub final SystemProperties systemProperties = new SystemProperties(); + -+ private static final OpenTelemetry OTEL = ++ private static final OpenTelemetry OTEL_WITH_B3_PROPAGATOR = + OpenTelemetry.propagating(ContextPropagators.create(B3Propagator.injectingSingleHeader())); + -+ private static final AwsLambdaFunctionInstrumenter INSTRUMENTER = -+ AwsLambdaFunctionInstrumenterFactory.createInstrumenter(OTEL); ++ private static final AwsLambdaFunctionInstrumenter INSTRUMENTER_WITH_B3_PROPAGATOR = ++ AwsLambdaFunctionInstrumenterFactory.createInstrumenter(OTEL_WITH_B3_PROPAGATOR); ++ ++ // Only for new lambda context tests ++ private static final OpenTelemetry OTEL_WITH_B3_XRAY_PROPAGATORS = ++ OpenTelemetry.propagating( ++ ContextPropagators.create( ++ TextMapPropagator.composite( ++ B3Propagator.injectingSingleHeader(), AwsXrayPropagator.getInstance()))); ++ private static final OpenTelemetry OTEL_WITH_XRAY_B3_PROPAGATORS = ++ OpenTelemetry.propagating( ++ ContextPropagators.create( ++ TextMapPropagator.composite( ++ AwsXrayPropagator.getInstance(), B3Propagator.injectingSingleHeader()))); ++ ++ private static final AwsLambdaFunctionInstrumenter INSTRUMENTER_WITH_B3_XRAY_PROPAGATORS = ++ AwsLambdaFunctionInstrumenterFactory.createInstrumenter(OTEL_WITH_B3_XRAY_PROPAGATORS); ++ ++ private static final AwsLambdaFunctionInstrumenter INSTRUMENTER_WITH_XRAY_B3_PROPAGATORS = ++ AwsLambdaFunctionInstrumenterFactory.createInstrumenter(OTEL_WITH_XRAY_B3_PROPAGATORS); ++ ++ private static final Context mockLambdaContext = mock(Context.class); + + @Test + void shouldUseHttpIfAwsParentNotSampled() { @@ -213,7 +342,8 @@ index 0000000000..1fa0b6e536 + "Root=1-8a3c60f7-d188f8fa79d48a391a778fa6;Parent=0000000000000456;Sampled=0"); + + // when -+ Context context = ParentContextExtractor.extract(headers, INSTRUMENTER); ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract(headers, INSTRUMENTER_WITH_B3_PROPAGATOR, mockLambdaContext); + // then + Span span = Span.fromContext(context); + SpanContext spanContext = span.getSpanContext(); @@ -239,7 +369,9 @@ index 0000000000..1fa0b6e536 + "Root=1-8a3c60f7-d188f8fa79d48a391a778fa6;Parent=0000000000000456;Sampled=1"); + + // when -+ Context context = ParentContextExtractor.extract(headers, INSTRUMENTER); ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract( ++ headers, INSTRUMENTER_WITH_B3_XRAY_PROPAGATORS, mockLambdaContext); + // then + Span span = Span.fromContext(context); + SpanContext spanContext = span.getSpanContext(); @@ -262,7 +394,8 @@ index 0000000000..1fa0b6e536 + "true"); + + // when -+ Context context = ParentContextExtractor.extract(headers, INSTRUMENTER); ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract(headers, INSTRUMENTER_WITH_B3_PROPAGATOR, mockLambdaContext); + // then + Span 
span = Span.fromContext(context); + SpanContext spanContext = span.getSpanContext(); @@ -277,22 +410,257 @@ index 0000000000..1fa0b6e536 + // given + systemProperties.set( + "com.amazonaws.xray.traceHeader", -+ "Root=1-8a3c60f7-d188f8fa79d48a391a778fa7;Parent=0000000000000789;Sampled=0"); ++ "Root=1-8a3c60f7-d188f8fa79d48a391a778fa7;Parent=0000000000000789;Sampled=1"); ++ environmentVariables.set( ++ "_X_AMZN_TRACE_ID", ++ "Root=1-8a3c60f7-d188f8fa79d48a391a778fa6;Parent=0000000000000456;Sampled=1"); ++ ++ // when ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract( ++ ImmutableMap.of(), INSTRUMENTER_WITH_B3_XRAY_PROPAGATORS, mockLambdaContext); ++ // then ++ Span span = Span.fromContext(context); ++ SpanContext spanContext = span.getSpanContext(); ++ assertThat(spanContext.isValid()).isTrue(); ++ assertThat(spanContext.isValid()).isTrue(); ++ assertThat(spanContext.getSpanId()).isEqualTo("0000000000000789"); ++ assertThat(spanContext.getTraceId()).isEqualTo("8a3c60f7d188f8fa79d48a391a778fa7"); ++ } ++ ++ @Test ++ void shouldUseLambdaContextToExtractXrayTraceId() { ++ // given ++ Map headers = ImmutableMap.of(); ++ Context mockLambdaContextWithXrayTraceId = mock(Context.class); ++ when(mockLambdaContextWithXrayTraceId.getXrayTraceId()) ++ .thenReturn("Root=1-4fd0b613-1f19f39af59518d127b0cafe;Parent=0000000000000123;Sampled=1"); ++ ++ // when ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract( ++ headers, INSTRUMENTER_WITH_B3_XRAY_PROPAGATORS, mockLambdaContextWithXrayTraceId); ++ // then ++ Span span = Span.fromContext(context); ++ SpanContext spanContext = span.getSpanContext(); ++ assertThat(spanContext.isValid()).isTrue(); ++ assertThat(spanContext.getSpanId()).isEqualTo("0000000000000123"); ++ assertThat(spanContext.getTraceId()).isEqualTo("4fd0b6131f19f39af59518d127b0cafe"); ++ } ++ ++ @Test ++ void shouldPreferLambdaContextOverSystemProperty() { ++ // given ++ Map headers = ImmutableMap.of(); ++ Context mockLambdaContextWithXrayTraceId = mock(Context.class); ++ when(mockLambdaContextWithXrayTraceId.getXrayTraceId()) ++ .thenReturn("Root=1-4fd0b613-1f19f39af59518d127b0cafe;Parent=0000000000000123;Sampled=1"); ++ systemProperties.set( ++ "com.amazonaws.xray.traceHeader", ++ "Root=1-8a3c60f7-d188f8fa79d48a391a778fa7;Parent=0000000000000789;Sampled=1"); ++ ++ // when ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract( ++ headers, INSTRUMENTER_WITH_B3_XRAY_PROPAGATORS, mockLambdaContextWithXrayTraceId); ++ // then ++ Span span = Span.fromContext(context); ++ SpanContext spanContext = span.getSpanContext(); ++ assertThat(spanContext.isValid()).isTrue(); ++ assertThat(spanContext.getSpanId()).isEqualTo("0000000000000123"); ++ assertThat(spanContext.getTraceId()).isEqualTo("4fd0b6131f19f39af59518d127b0cafe"); ++ } ++ ++ @Test ++ void shouldPreferLambdaContextOverEnvVariable() { ++ // given ++ Map headers = ImmutableMap.of(); ++ Context mockLambdaContextWithXrayTraceId = mock(Context.class); ++ when(mockLambdaContextWithXrayTraceId.getXrayTraceId()) ++ .thenReturn("Root=1-4fd0b613-1f19f39af59518d127b0cafe;Parent=0000000000000123;Sampled=1"); ++ environmentVariables.set( ++ "_X_AMZN_TRACE_ID", ++ "Root=1-8a3c60f7-d188f8fa79d48a391a778fa6;Parent=0000000000000456;Sampled=1"); ++ ++ // when ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract( ++ headers, INSTRUMENTER_WITH_B3_XRAY_PROPAGATORS, mockLambdaContextWithXrayTraceId); ++ // then ++ Span span = Span.fromContext(context); ++ 
SpanContext spanContext = span.getSpanContext(); ++ assertThat(spanContext.isValid()).isTrue(); ++ assertThat(spanContext.getSpanId()).isEqualTo("0000000000000123"); ++ assertThat(spanContext.getTraceId()).isEqualTo("4fd0b6131f19f39af59518d127b0cafe"); ++ } ++ ++ @Test ++ void shouldPreferLambdaContextOverHttp() { ++ // given ++ Map headers = ++ ImmutableMap.of( ++ "X-b3-traceId", ++ "4fd0b6131f19f39af59518d127b0cafe", ++ "x-b3-spanid", ++ "0000000000000123", ++ "X-B3-Sampled", ++ "true"); ++ Context mockLambdaContextWithXrayTraceId = mock(Context.class); ++ when(mockLambdaContextWithXrayTraceId.getXrayTraceId()) ++ .thenReturn("Root=1-8a3c60f7-d188f8fa79d48a391a778fa6;Parent=0000000000000456;Sampled=1"); ++ ++ // when ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract( ++ headers, INSTRUMENTER_WITH_B3_XRAY_PROPAGATORS, mockLambdaContextWithXrayTraceId); ++ // then ++ Span span = Span.fromContext(context); ++ SpanContext spanContext = span.getSpanContext(); ++ assertThat(spanContext.isValid()).isTrue(); ++ assertThat(spanContext.getSpanId()).isEqualTo("0000000000000456"); ++ assertThat(spanContext.getTraceId()).isEqualTo("8a3c60f7d188f8fa79d48a391a778fa6"); ++ } ++ ++ @Test ++ void shouldPreferHttpOverXrayIdSetByLambdaContext() { ++ // given ++ Map headers = ++ ImmutableMap.of( ++ "X-b3-traceId", ++ "4fd0b6131f19f39af59518d127b0cafe", ++ "x-b3-spanid", ++ "0000000000000123", ++ "X-B3-Sampled", ++ "true"); + environmentVariables.set( + "_X_AMZN_TRACE_ID", + "Root=1-8a3c60f7-d188f8fa79d48a391a778fa6;Parent=0000000000000456;Sampled=1"); ++ systemProperties.set( ++ "com.amazonaws.xray.traceHeader", ++ "Root=1-8a3c60f7-d188f8fa79d48a391a778fa6;Parent=0000000000000456;Sampled=1"); ++ Context mockLambdaContextWithXrayTraceId = mock(Context.class); ++ when(mockLambdaContextWithXrayTraceId.getXrayTraceId()) ++ .thenReturn("Root=1-8a3c60f7-d188f8fa79d48a391a778fa6;Parent=0000000000000456;Sampled=1"); + + // when -+ Context context = ParentContextExtractor.extract(headers, INSTRUMENTER); ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract( ++ headers, INSTRUMENTER_WITH_XRAY_B3_PROPAGATORS, mockLambdaContextWithXrayTraceId); + // then + Span span = Span.fromContext(context); + SpanContext spanContext = span.getSpanContext(); + assertThat(spanContext.isValid()).isTrue(); + assertThat(spanContext.isValid()).isTrue(); ++ assertThat(spanContext.getSpanId()).isEqualTo("0000000000000123"); ++ assertThat(spanContext.getTraceId()).isEqualTo("4fd0b6131f19f39af59518d127b0cafe"); ++ } ++ ++ @Test ++ void shouldFallbackToSystemPropertyIfContextTraceIdIsNull() { ++ // given ++ Map headers = ImmutableMap.of(); ++ Context mockLambdaContextWithXrayTraceId = mock(Context.class); ++ when(mockLambdaContextWithXrayTraceId.getXrayTraceId()).thenReturn(null); ++ systemProperties.set( ++ "com.amazonaws.xray.traceHeader", ++ "Root=1-8a3c60f7-d188f8fa79d48a391a778fa7;Parent=0000000000000789;Sampled=1"); ++ ++ // when ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract( ++ headers, INSTRUMENTER_WITH_B3_XRAY_PROPAGATORS, mockLambdaContextWithXrayTraceId); ++ // then ++ Span span = Span.fromContext(context); ++ SpanContext spanContext = span.getSpanContext(); ++ assertThat(spanContext.isValid()).isTrue(); ++ assertThat(spanContext.getSpanId()).isEqualTo("0000000000000789"); ++ assertThat(spanContext.getTraceId()).isEqualTo("8a3c60f7d188f8fa79d48a391a778fa7"); ++ } ++ ++ @Test ++ void shouldFallbackToSystemPropertyIfContextTraceIdIsEmptyString() 
{ ++ // given ++ Map headers = ImmutableMap.of(); ++ Context mockLambdaContextWithXrayTraceId = mock(Context.class); ++ when(mockLambdaContextWithXrayTraceId.getXrayTraceId()).thenReturn(""); ++ systemProperties.set( ++ "com.amazonaws.xray.traceHeader", ++ "Root=1-8a3c60f7-d188f8fa79d48a391a778fa7;Parent=0000000000000789;Sampled=1"); ++ ++ // when ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract( ++ headers, INSTRUMENTER_WITH_B3_XRAY_PROPAGATORS, mockLambdaContextWithXrayTraceId); ++ // then ++ Span span = Span.fromContext(context); ++ SpanContext spanContext = span.getSpanContext(); ++ assertThat(spanContext.isValid()).isTrue(); ++ assertThat(spanContext.getSpanId()).isEqualTo("0000000000000789"); ++ assertThat(spanContext.getTraceId()).isEqualTo("8a3c60f7d188f8fa79d48a391a778fa7"); ++ } ++ ++ @Test ++ void shouldFallbackToSystemPropertyWhenNoSuchMethodErrorThrown() { ++ // given ++ Map headers = ImmutableMap.of(); ++ Context mockLambdaContextWithNoSuchMethodError = mock(Context.class); ++ when(mockLambdaContextWithNoSuchMethodError.getXrayTraceId()) ++ .thenThrow(new NoSuchMethodError("getXrayTraceId method not found")); ++ systemProperties.set( ++ "com.amazonaws.xray.traceHeader", ++ "Root=1-8a3c60f7-d188f8fa79d48a391a778fa7;Parent=0000000000000789;Sampled=1"); ++ ++ // Reset the static flag to ensure the method is attempted ++ ParentContextExtractor.getXrayTraceIdMethodExists = true; ++ ++ // when - call extract ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract( ++ headers, INSTRUMENTER_WITH_B3_XRAY_PROPAGATORS, mockLambdaContextWithNoSuchMethodError); ++ ++ // then ++ Span span = Span.fromContext(context); ++ SpanContext spanContext = span.getSpanContext(); ++ assertThat(spanContext.isValid()).isTrue(); + assertThat(spanContext.getSpanId()).isEqualTo("0000000000000789"); -+ assertThat(spanContext.getTraceId()).isEqualTo("d188f8fa79d48a391a778fa7"); ++ assertThat(spanContext.getTraceId()).isEqualTo("8a3c60f7d188f8fa79d48a391a778fa7"); ++ // Verify getXrayTraceId was called only once ++ assertThat(ParentContextExtractor.getXrayTraceIdMethodExists).isFalse(); ++ verify(mockLambdaContextWithNoSuchMethodError, times(1)).getXrayTraceId(); ++ ++ // when - call extract again ++ ParentContextExtractor.extract( ++ headers, INSTRUMENTER_WITH_B3_XRAY_PROPAGATORS, mockLambdaContextWithNoSuchMethodError); ++ // Verify the call count of getXrayTraceId is still 1 ++ verify(mockLambdaContextWithNoSuchMethodError, times(1)).getXrayTraceId(); + } +} +diff --git a/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestHandlerInstrumentation.java b/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestHandlerInstrumentation.java +index e059250807..1fa80c3735 100644 +--- a/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestHandlerInstrumentation.java ++++ b/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestHandlerInstrumentation.java +@@ -70,7 +70,7 @@ public class AwsLambdaRequestHandlerInstrumentation implements TypeInstrumentati + } + input = AwsLambdaRequest.create(context, arg, headers); + io.opentelemetry.context.Context parentContext = +- 
AwsLambdaInstrumentationHelper.functionInstrumenter().extract(input); ++ AwsLambdaInstrumentationHelper.functionInstrumenter().extract(input, context); + + if (!AwsLambdaInstrumentationHelper.functionInstrumenter() + .shouldStart(parentContext, input)) { +diff --git a/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestStreamHandlerInstrumentation.java b/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestStreamHandlerInstrumentation.java +index fb5971016a..d31389e1c4 100644 +--- a/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestStreamHandlerInstrumentation.java ++++ b/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestStreamHandlerInstrumentation.java +@@ -62,7 +62,7 @@ public class AwsLambdaRequestStreamHandlerInstrumentation implements TypeInstrum + Map headers = Collections.emptyMap(); + otelInput = AwsLambdaRequest.create(context, input, headers); + io.opentelemetry.context.Context parentContext = +- AwsLambdaInstrumentationHelper.functionInstrumenter().extract(otelInput); ++ AwsLambdaInstrumentationHelper.functionInstrumenter().extract(otelInput, context); + + if (!AwsLambdaInstrumentationHelper.functionInstrumenter() + .shouldStart(parentContext, otelInput)) { diff --git a/instrumentation/aws-lambda/aws-lambda-events-2.2/library/src/main/java/io/opentelemetry/instrumentation/awslambdaevents/v2_2/internal/AwsLambdaSqsInstrumenterFactory.java b/instrumentation/aws-lambda/aws-lambda-events-2.2/library/src/main/java/io/opentelemetry/instrumentation/awslambdaevents/v2_2/internal/AwsLambdaSqsInstrumenterFactory.java index 4cd11fc0c4..7b7d62755c 100644 --- a/instrumentation/aws-lambda/aws-lambda-events-2.2/library/src/main/java/io/opentelemetry/instrumentation/awslambdaevents/v2_2/internal/AwsLambdaSqsInstrumenterFactory.java @@ -304,16 +672,16 @@ index 4cd11fc0c4..7b7d62755c 100644 - .addSpanLinksExtractor(new SqsMessageSpanLinksExtractor()) .buildInstrumenter(SpanKindExtractor.alwaysConsumer()); } - + diff --git a/version.gradle.kts b/version.gradle.kts -index 7900c9a4d9..80383d7c22 100644 +index ec9690086c..b267166804 100644 --- a/version.gradle.kts +++ b/version.gradle.kts @@ -1,5 +1,5 @@ --val stableVersion = "2.11.0-adot1" --val alphaVersion = "2.11.0-adot1-alpha" -+val stableVersion = "2.11.0-adot-lambda1" -+val alphaVersion = "2.11.0-adot-lambda1-alpha" - +-val stableVersion = "2.18.1-adot1" +-val alphaVersion = "2.18.1-adot1-alpha" ++val stableVersion = "2.18.1-adot-lambda1" ++val alphaVersion = "2.18.1-adot-lambda1-alpha" + allprojects { if (findProperty("otel.stable") != "true") { diff --git a/sample-apps/udp-exporter-test-app/build.gradle.kts b/sample-apps/udp-exporter-test-app/build.gradle.kts index fdce4008df..a24f883540 100644 --- a/sample-apps/udp-exporter-test-app/build.gradle.kts +++ b/sample-apps/udp-exporter-test-app/build.gradle.kts @@ -1,6 +1,6 @@ plugins { id("java") - kotlin("jvm") version "1.9.0" + kotlin("jvm") version "2.1.0-RC2" id("io.spring.dependency-management") version "1.1.0" id("org.springframework.boot") version "2.7.17" } diff --git a/settings.gradle.kts b/settings.gradle.kts index 6c44234701..74b4b8ae3d 100644 --- a/settings.gradle.kts +++ 
b/settings.gradle.kts @@ -15,7 +15,7 @@ pluginManagement { plugins { - id("com.diffplug.spotless") version "6.25.0" + id("com.diffplug.spotless") version "7.0.3" id("com.github.ben-manes.versions") version "0.50.0" id("com.github.jk1.dependency-license-report") version "2.5" id("com.gradleup.shadow") version "8.3.5" diff --git a/tools/cp-utility/Cargo.lock b/tools/cp-utility/Cargo.lock index a1aa5dd68a..f10bd59c49 100644 --- a/tools/cp-utility/Cargo.lock +++ b/tools/cp-utility/Cargo.lock @@ -1,18 +1,18 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "bitflags" -version = "1.3.2" +version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" [[package]] -name = "bitflags" -version = "2.4.1" +name = "bumpalo" +version = "3.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" [[package]] name = "cfg-if" @@ -30,9 +30,9 @@ dependencies = [ [[package]] name = "errno" -version = "0.3.8" +version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad" dependencies = [ "libc", "windows-sys", @@ -40,32 +40,55 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.0.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "getrandom" -version = "0.2.8" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" +checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" dependencies = [ "cfg-if", "libc", + "r-efi", "wasi", ] +[[package]] +name = "js-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + [[package]] name = "libc" -version = "0.2.151" +version = "0.2.171" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" +checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6" [[package]] name = "linux-raw-sys" -version = "0.4.12" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" +checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" + +[[package]] +name = "log" +version = "0.4.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = 
"ppv-lite86" @@ -73,22 +96,46 @@ version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +[[package]] +name = "proc-macro2" +version = "1.0.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" + [[package]] name = "rand" -version = "0.8.5" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94" dependencies = [ - "libc", "rand_chacha", "rand_core", + "zerocopy", ] [[package]] name = "rand_chacha" -version = "0.3.1" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" dependencies = [ "ppv-lite86", "rand_core", @@ -96,63 +143,140 @@ dependencies = [ [[package]] name = "rand_core" -version = "0.6.4" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" dependencies = [ "getrandom", ] -[[package]] -name = "redox_syscall" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" -dependencies = [ - "bitflags 1.3.2", -] - [[package]] name = "rustix" -version = "0.38.28" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316" +checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8" dependencies = [ - "bitflags 2.4.1", + "bitflags", "errno", "libc", "linux-raw-sys", "windows-sys", ] +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "syn" +version = "2.0.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + [[package]] name = "tempfile" -version = "3.9.0" +version = "3.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01ce4141aa927a6d1bd34a041795abd0db1cccba5d5f24b009f694bdf3a1f3fa" +checksum = "84fa4d11fadde498443cca10fd3ac23c951f0dc59e080e9f4b93d4df4e4eea53" dependencies = [ - "cfg-if", "fastrand", - "redox_syscall", + "getrandom", + "once_cell", "rustix", "windows-sys", ] +[[package]] +name = "unicode-ident" +version = "1.0.18" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" + [[package]] name = "uuid" -version = "1.5.0" +version = "1.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88ad59a7560b41a70d191093a945f0b87bc1deeda46fb237479708a1d6b6cdfc" +checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2" dependencies = [ "getrandom", + "js-sys", "rand", + "wasm-bindgen", ] [[package]] name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +version = "0.14.2+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +dependencies = [ + "wit-bindgen-rt", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" +dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] [[package]] name = "windows-sys" @@ -219,3 +343,32 @@ name = "windows_x86_64_msvc" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" + +[[package]] +name = "wit-bindgen-rt" +version = "0.39.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" +dependencies = [ + "bitflags", +] + +[[package]] +name = "zerocopy" +version = "0.8.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd97444d05a4328b90e75e503a34bad781f14e28a823ad3557f0750df1ebcbc6" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6352c01d0edd5db859a63e2605f4ea3183ddbd15e2c4a9e7d32184df75e4f154" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] diff --git a/tools/cp-utility/Cargo.toml b/tools/cp-utility/Cargo.toml index 7169660e4d..3055ad8209 100644 --- a/tools/cp-utility/Cargo.toml +++ b/tools/cp-utility/Cargo.toml @@ -10,8 +10,8 @@ edition = "2021" [dev-dependencies] # dependencies 
only used during tests -tempfile = "3.9.0" -uuid = { version = "1.5.0", features = ["v4", "fast-rng"] } +tempfile = "3.22.0" +uuid = { version = "1.18.1", features = ["v4", "fast-rng"] } [profile.release] # Levers to optimize the binary for size diff --git a/version.gradle.kts b/version.gradle.kts new file mode 100644 index 0000000000..9882736ff9 --- /dev/null +++ b/version.gradle.kts @@ -0,0 +1,24 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +val adotVersion = "2.18.0-SNAPSHOT" + +allprojects { + version = if (project.hasProperty("release.version")) { + project.property("release.version") as String + } else { + adotVersion + } +}
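
A minimal, self-contained Java sketch of the parent-trace-header resolution order that the ParentContextExtractor hunks above implement (lambda Context#getXrayTraceId() guarded against NoSuchMethodError on older aws-lambda-java-core versions, then the com.amazonaws.xray.traceHeader system property, then the _X_AMZN_TRACE_ID environment variable, merged into the lower-cased HTTP headers under x-amzn-trace-id before the configured propagators extract). The class and method names below are illustrative only and do not appear in the patch; this is not the patched code itself.

// XrayHeaderResolutionSketch.java -- standalone illustration, compiles and runs on its own.
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.function.Supplier;

public final class XrayHeaderResolutionSketch {

  static final String XRAY_HEADER_KEY = "x-amzn-trace-id";

  // Mirrors the patch's getXrayTraceIdMethodExists flag: once a NoSuchMethodError is seen,
  // the lambda-context lookup is not attempted again.
  private static boolean xrayTraceIdMethodExists = true;

  // xrayTraceIdFromContext stands in for lambdaContext.getXrayTraceId().
  static Map<String, String> mergeHeaders(
      Map<String, String> httpHeaders, Supplier<String> xrayTraceIdFromContext) {
    Map<String, String> merged = new HashMap<>();
    for (Map.Entry<String, String> e : httpHeaders.entrySet()) {
      merged.put(e.getKey().toLowerCase(Locale.ROOT), e.getValue());
    }
    String xray = resolveXrayHeader(xrayTraceIdFromContext);
    if (xray != null && !xray.isEmpty()) {
      // The X-Ray header is added alongside the HTTP headers; which one wins is decided
      // by the order of the configured propagators, as exercised by the tests in the patch.
      merged.put(XRAY_HEADER_KEY, xray);
    }
    return merged;
  }

  static String resolveXrayHeader(Supplier<String> xrayTraceIdFromContext) {
    if (xrayTraceIdMethodExists) {
      try {
        String fromContext = xrayTraceIdFromContext.get();
        if (fromContext != null && !fromContext.isEmpty()) {
          return fromContext;
        }
      } catch (NoSuchMethodError e) {
        // Older aws-lambda-java-core versions lack getXrayTraceId(); remember and stop retrying.
        xrayTraceIdMethodExists = false;
      }
    }
    // Java 17+ Lambda runtimes propagate the trace header via a system property.
    String fromProperty = System.getProperty("com.amazonaws.xray.traceHeader");
    if (fromProperty != null && !fromProperty.isEmpty()) {
      return fromProperty;
    }
    // Fall back to the environment variable used by earlier runtimes.
    return System.getenv("_X_AMZN_TRACE_ID");
  }

  public static void main(String[] args) {
    Map<String, String> headers = new HashMap<>();
    headers.put("X-B3-TraceId", "4fd0b6131f19f39af59518d127b0cafe");
    Map<String, String> merged =
        mergeHeaders(
            headers,
            () -> "Root=1-4fd0b613-1f19f39af59518d127b0cafe;Parent=0000000000000123;Sampled=1");
    System.out.println(merged);
  }

  private XrayHeaderResolutionSketch() {}
}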