From b8dffa7779c2e24e5a29ff03548229d01bd8d0d6 Mon Sep 17 00:00:00 2001 From: Prashant Srivastava <50466688+srprash@users.noreply.github.com> Date: Fri, 1 Aug 2025 13:18:52 -0400 Subject: [PATCH 01/83] Fix Otlp Aws exporters failures for GZIP compressed telemetry exports (#1124) --- ...sApplicationSignalsCustomizerProvider.java | 20 ++++ ...pplier.java => AwsAuthHeaderSupplier.java} | 36 ++++++- .../otlp/aws/common/BaseOtlpAwsExporter.java | 13 ++- .../otlp/aws/common/CompressionMethod.java | 21 ++++ .../otlp/aws/logs/OtlpAwsLogsExporter.java | 20 ++-- .../aws/logs/OtlpAwsLogsExporterBuilder.java | 14 ++- .../otlp/aws/traces/OtlpAwsSpanExporter.java | 20 ++-- .../traces/OtlpAwsSpanExporterBuilder.java | 14 ++- .../providers/OtlpAwsExporterTest.java | 99 ++++++++++++++++--- 9 files changed, 216 insertions(+), 41 deletions(-) rename awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/common/{SigV4AuthHeaderSupplier.java => AwsAuthHeaderSupplier.java} (71%) create mode 100644 awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/common/CompressionMethod.java diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsApplicationSignalsCustomizerProvider.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsApplicationSignalsCustomizerProvider.java index d6bd420475..4652885090 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsApplicationSignalsCustomizerProvider.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsApplicationSignalsCustomizerProvider.java @@ -135,6 +135,12 @@ public final class AwsApplicationSignalsCustomizerProvider private static final String OTEL_TRACES_SAMPLER = "otel.traces.sampler"; private static final String OTEL_TRACES_SAMPLER_ARG = "otel.traces.sampler.arg"; static final String 
OTEL_EXPORTER_OTLP_LOGS_HEADERS = "otel.exporter.otlp.logs.headers"; + private static final String OTEL_EXPORTER_OTLP_COMPRESSION_CONFIG = + "otel.exporter.otlp.compression"; + private static final String OTEL_EXPORTER_OTLP_TRACES_COMPRESSION_CONFIG = + "otel.exporter.otlp.traces.compression"; + private static final String OTEL_EXPORTER_OTLP_LOGS_COMPRESSION_CONFIG = + "otel.exporter.otlp.logs.compression"; // UDP packet can be upto 64KB. To limit the packet size, we limit the exported batch size. // This is a bit of a magic number, as there is no simple way to tell how many spans can make a @@ -372,11 +378,18 @@ SpanExporter customizeSpanExporter(SpanExporter spanExporter, ConfigProperties c // and OTEL_EXPORTER_OTLP_TRACES_PROTOCOL is http/protobuf // so the given spanExporter will be an instance of OtlpHttpSpanExporter + // get compression method from environment + String compression = + configProps.getString( + OTEL_EXPORTER_OTLP_TRACES_COMPRESSION_CONFIG, + configProps.getString(OTEL_EXPORTER_OTLP_COMPRESSION_CONFIG, "none")); + try { spanExporter = OtlpAwsSpanExporterBuilder.create( (OtlpHttpSpanExporter) spanExporter, configProps.getString(OTEL_EXPORTER_OTLP_TRACES_ENDPOINT)) + .setCompression(compression) .build(); } catch (Exception e) { // This technically should never happen as the validator checks for the correct env @@ -408,10 +421,17 @@ LogRecordExporter customizeLogsExporter( // OTEL_EXPORTER_OTLP_LOGS_PROTOCOL is http/protobuf // so the given logsExporter will be an instance of OtlpHttpLogRecorderExporter + // get compression method from environment + String compression = + configProps.getString( + OTEL_EXPORTER_OTLP_LOGS_COMPRESSION_CONFIG, + configProps.getString(OTEL_EXPORTER_OTLP_COMPRESSION_CONFIG, "none")); + try { return OtlpAwsLogsExporterBuilder.create( (OtlpHttpLogRecordExporter) logsExporter, configProps.getString(OTEL_EXPORTER_OTLP_LOGS_ENDPOINT)) + .setCompression(compression) .build(); } catch (Exception e) { // This technically should 
never happen as the validator checks for the correct env diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/common/SigV4AuthHeaderSupplier.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/common/AwsAuthHeaderSupplier.java similarity index 71% rename from awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/common/SigV4AuthHeaderSupplier.java rename to awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/common/AwsAuthHeaderSupplier.java index ca34d829ec..b47e0916ae 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/common/SigV4AuthHeaderSupplier.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/common/AwsAuthHeaderSupplier.java @@ -16,11 +16,14 @@ package software.amazon.opentelemetry.javaagent.providers.exporter.otlp.aws.common; import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; import java.net.URI; import java.util.*; import java.util.function.Supplier; import java.util.logging.Level; import java.util.logging.Logger; +import java.util.zip.GZIPOutputStream; import software.amazon.awssdk.auth.credentials.AwsCredentials; import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; import software.amazon.awssdk.http.SdkHttpFullRequest; @@ -29,11 +32,11 @@ import software.amazon.awssdk.http.auth.aws.signer.AwsV4HttpSigner; import software.amazon.awssdk.http.auth.spi.signer.SignedRequest; -final class SigV4AuthHeaderSupplier implements Supplier> { +final class AwsAuthHeaderSupplier implements Supplier> { BaseOtlpAwsExporter exporter; Logger logger; - public SigV4AuthHeaderSupplier(BaseOtlpAwsExporter exporter) { + public AwsAuthHeaderSupplier(BaseOtlpAwsExporter exporter) { 
this.exporter = exporter; this.logger = Logger.getLogger(exporter.getClass().getName()); } @@ -41,7 +44,7 @@ public SigV4AuthHeaderSupplier(BaseOtlpAwsExporter exporter) { @Override public Map get() { try { - byte[] data = exporter.data.get(); + ByteArrayOutputStream data = exporter.data.get(); SdkHttpRequest httpRequest = SdkHttpFullRequest.builder() @@ -50,6 +53,14 @@ public Map get() { .putHeader("Content-Type", "application/x-protobuf") .build(); + // Compress the data before signing with gzip + ByteArrayOutputStream compressedData; + if (exporter.getCompression().equals(CompressionMethod.GZIP)) { + compressedData = compressWithGzip(data); + } else { + compressedData = data; + } + AwsCredentials credentials = DefaultCredentialsProvider.create().resolveCredentials(); SignedRequest signedRequest = @@ -60,7 +71,7 @@ public Map get() { .request(httpRequest) .putProperty(AwsV4HttpSigner.SERVICE_SIGNING_NAME, exporter.serviceName()) .putProperty(AwsV4HttpSigner.REGION_NAME, exporter.awsRegion) - .payload(() -> new ByteArrayInputStream(data))); + .payload(() -> new ByteArrayInputStream(compressedData.toByteArray()))); Map result = new HashMap<>(); @@ -84,4 +95,21 @@ public Map get() { return Collections.emptyMap(); } } + + /** + * Compresses the given byte array using GZIP compression. 
+ * + * @param data the byte array stream to compress + * @return the compressed byte as a ByteArrayOutputStream + * @throws IOException if compression fails + */ + private ByteArrayOutputStream compressWithGzip(ByteArrayOutputStream data) throws IOException { + ByteArrayOutputStream compressedData = new ByteArrayOutputStream(); + + try (GZIPOutputStream gzipOut = new GZIPOutputStream(compressedData)) { + data.writeTo(gzipOut); + } + + return compressedData; + } } diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/common/BaseOtlpAwsExporter.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/common/BaseOtlpAwsExporter.java index 864f4c0a82..08ae2cc618 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/common/BaseOtlpAwsExporter.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/common/BaseOtlpAwsExporter.java @@ -15,6 +15,7 @@ package software.amazon.opentelemetry.javaagent.providers.exporter.otlp.aws.common; +import java.io.ByteArrayOutputStream; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Supplier; @@ -27,15 +28,21 @@ public abstract class BaseOtlpAwsExporter { protected final String awsRegion; protected final String endpoint; - protected final AtomicReference data; + protected final AtomicReference data; protected final Supplier> headerSupplier; + protected final CompressionMethod compression; - protected BaseOtlpAwsExporter(String endpoint) { + protected BaseOtlpAwsExporter(String endpoint, CompressionMethod compression) { this.endpoint = endpoint.toLowerCase(); + this.compression = compression; this.awsRegion = endpoint.split("\\.")[1]; this.data = new AtomicReference<>(); - this.headerSupplier = new SigV4AuthHeaderSupplier(this); + this.headerSupplier = new 
AwsAuthHeaderSupplier(this); } public abstract String serviceName(); + + public CompressionMethod getCompression() { + return this.compression; + } } diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/common/CompressionMethod.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/common/CompressionMethod.java new file mode 100644 index 0000000000..ae63dd12ff --- /dev/null +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/common/CompressionMethod.java @@ -0,0 +1,21 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + +package software.amazon.opentelemetry.javaagent.providers.exporter.otlp.aws.common; + +public enum CompressionMethod { + NONE, + GZIP +} diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/logs/OtlpAwsLogsExporter.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/logs/OtlpAwsLogsExporter.java index 1f1bd2a006..f93b1f1c9a 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/logs/OtlpAwsLogsExporter.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/logs/OtlpAwsLogsExporter.java @@ -28,6 +28,7 @@ import java.util.StringJoiner; import javax.annotation.Nonnull; import software.amazon.opentelemetry.javaagent.providers.exporter.otlp.aws.common.BaseOtlpAwsExporter; +import software.amazon.opentelemetry.javaagent.providers.exporter.otlp.aws.common.CompressionMethod; /** * This exporter extends the functionality of the OtlpHttpLogsRecordExporter to allow logs to be @@ -42,19 +43,18 @@ public final class OtlpAwsLogsExporter extends BaseOtlpAwsExporter implements Lo private final OtlpHttpLogRecordExporter parentExporter; static OtlpAwsLogsExporter getDefault(String endpoint) { - return new OtlpAwsLogsExporter(endpoint); + return new OtlpAwsLogsExporter( + OtlpHttpLogRecordExporter.getDefault(), endpoint, CompressionMethod.NONE); } - static OtlpAwsLogsExporter create(OtlpHttpLogRecordExporter parent, String endpoint) { - return new OtlpAwsLogsExporter(parent, endpoint); + static OtlpAwsLogsExporter create( + OtlpHttpLogRecordExporter parent, String endpoint, CompressionMethod compression) { + return new OtlpAwsLogsExporter(parent, endpoint, compression); } - private OtlpAwsLogsExporter(String endpoint) { - this(OtlpHttpLogRecordExporter.getDefault(), endpoint); - } - - private OtlpAwsLogsExporter(OtlpHttpLogRecordExporter parentExporter, 
String endpoint) { - super(endpoint); + private OtlpAwsLogsExporter( + OtlpHttpLogRecordExporter parentExporter, String endpoint, CompressionMethod compression) { + super(endpoint, compression); this.parentExporterBuilder = parentExporter.toBuilder() @@ -75,7 +75,7 @@ public CompletableResultCode export(@Nonnull Collection logs) { try { ByteArrayOutputStream buffer = new ByteArrayOutputStream(); LogsRequestMarshaler.create(logs).writeBinaryTo(buffer); - this.data.set(buffer.toByteArray()); + this.data.set(buffer); return this.parentExporter.export(logs); } catch (IOException e) { return CompletableResultCode.ofFailure(); diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/logs/OtlpAwsLogsExporterBuilder.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/logs/OtlpAwsLogsExporterBuilder.java index 440dce6d79..bf91bd6d4e 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/logs/OtlpAwsLogsExporterBuilder.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/logs/OtlpAwsLogsExporterBuilder.java @@ -18,10 +18,12 @@ import static java.util.Objects.requireNonNull; import io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporter; +import software.amazon.opentelemetry.javaagent.providers.exporter.otlp.aws.common.CompressionMethod; public class OtlpAwsLogsExporterBuilder { private final OtlpHttpLogRecordExporter parentExporter; private final String endpoint; + private String compression; public static OtlpAwsLogsExporterBuilder create( OtlpHttpLogRecordExporter parentExporter, String endpoint) { @@ -32,8 +34,18 @@ public static OtlpAwsLogsExporter getDefault(String endpoint) { return OtlpAwsLogsExporter.getDefault(endpoint); } + public OtlpAwsLogsExporterBuilder setCompression(String compression) { + this.compression = compression; + return 
this; + } + public OtlpAwsLogsExporter build() { - return OtlpAwsLogsExporter.create(this.parentExporter, this.endpoint); + CompressionMethod compression = CompressionMethod.NONE; + if (this.compression != null && "gzip".equalsIgnoreCase(this.compression)) { + compression = CompressionMethod.GZIP; + } + + return OtlpAwsLogsExporter.create(this.parentExporter, this.endpoint, compression); } private OtlpAwsLogsExporterBuilder(OtlpHttpLogRecordExporter parentExporter, String endpoint) { diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/traces/OtlpAwsSpanExporter.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/traces/OtlpAwsSpanExporter.java index d2feba84a1..ff0dcf4cb3 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/traces/OtlpAwsSpanExporter.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/traces/OtlpAwsSpanExporter.java @@ -28,6 +28,7 @@ import java.util.StringJoiner; import javax.annotation.Nonnull; import software.amazon.opentelemetry.javaagent.providers.exporter.otlp.aws.common.BaseOtlpAwsExporter; +import software.amazon.opentelemetry.javaagent.providers.exporter.otlp.aws.common.CompressionMethod; /** * This exporter extends the functionality of the OtlpHttpSpanExporter to allow spans to be exported @@ -40,19 +41,18 @@ public final class OtlpAwsSpanExporter extends BaseOtlpAwsExporter implements Sp private final OtlpHttpSpanExporter parentExporter; static OtlpAwsSpanExporter getDefault(String endpoint) { - return new OtlpAwsSpanExporter(endpoint); + return new OtlpAwsSpanExporter( + OtlpHttpSpanExporter.getDefault(), endpoint, CompressionMethod.NONE); } - static OtlpAwsSpanExporter create(OtlpHttpSpanExporter parent, String endpoint) { - return new OtlpAwsSpanExporter(parent, endpoint); + static OtlpAwsSpanExporter 
create( + OtlpHttpSpanExporter parent, String endpoint, CompressionMethod compression) { + return new OtlpAwsSpanExporter(parent, endpoint, compression); } - private OtlpAwsSpanExporter(String endpoint) { - this(OtlpHttpSpanExporter.getDefault(), endpoint); - } - - private OtlpAwsSpanExporter(OtlpHttpSpanExporter parentExporter, String endpoint) { - super(endpoint); + private OtlpAwsSpanExporter( + OtlpHttpSpanExporter parentExporter, String endpoint, CompressionMethod compression) { + super(endpoint, compression); this.parentExporterBuilder = parentExporter.toBuilder() @@ -73,7 +73,7 @@ public CompletableResultCode export(@Nonnull Collection spans) { try { ByteArrayOutputStream buffer = new ByteArrayOutputStream(); TraceRequestMarshaler.create(spans).writeBinaryTo(buffer); - this.data.set(buffer.toByteArray()); + this.data.set(buffer); return this.parentExporter.export(spans); } catch (IOException e) { return CompletableResultCode.ofFailure(); diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/traces/OtlpAwsSpanExporterBuilder.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/traces/OtlpAwsSpanExporterBuilder.java index 1b0c725136..bef2d5a589 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/traces/OtlpAwsSpanExporterBuilder.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/exporter/otlp/aws/traces/OtlpAwsSpanExporterBuilder.java @@ -18,10 +18,12 @@ import static java.util.Objects.requireNonNull; import io.opentelemetry.exporter.otlp.http.trace.OtlpHttpSpanExporter; +import software.amazon.opentelemetry.javaagent.providers.exporter.otlp.aws.common.CompressionMethod; public class OtlpAwsSpanExporterBuilder { private final OtlpHttpSpanExporter parentExporter; private final String endpoint; + private String compression; public static 
OtlpAwsSpanExporterBuilder create( OtlpHttpSpanExporter parentExporter, String endpoint) { @@ -32,12 +34,22 @@ public static OtlpAwsSpanExporter getDefault(String endpoint) { return OtlpAwsSpanExporter.getDefault(endpoint); } + public OtlpAwsSpanExporterBuilder setCompression(String compression) { + this.compression = compression; + return this; + } + private OtlpAwsSpanExporterBuilder(OtlpHttpSpanExporter parentExporter, String endpoint) { this.parentExporter = requireNonNull(parentExporter, "Must set a parentExporter"); this.endpoint = requireNonNull(endpoint, "Must set an endpoint"); } public OtlpAwsSpanExporter build() { - return OtlpAwsSpanExporter.create(this.parentExporter, this.endpoint); + CompressionMethod compression = CompressionMethod.NONE; + if (this.compression != null && "gzip".equalsIgnoreCase(this.compression)) { + compression = CompressionMethod.GZIP; + } + + return OtlpAwsSpanExporter.create(this.parentExporter, this.endpoint, compression); } } diff --git a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/OtlpAwsExporterTest.java b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/OtlpAwsExporterTest.java index 69f6ab029c..0af51a345a 100644 --- a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/OtlpAwsExporterTest.java +++ b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/OtlpAwsExporterTest.java @@ -50,7 +50,10 @@ import software.amazon.awssdk.http.auth.spi.signer.SignRequest.Builder; import software.amazon.awssdk.http.auth.spi.signer.SignedRequest; import software.amazon.awssdk.identity.spi.AwsCredentialsIdentity; +import software.amazon.opentelemetry.javaagent.providers.exporter.otlp.aws.common.CompressionMethod; +import software.amazon.opentelemetry.javaagent.providers.exporter.otlp.aws.logs.OtlpAwsLogsExporter; import 
software.amazon.opentelemetry.javaagent.providers.exporter.otlp.aws.logs.OtlpAwsLogsExporterBuilder; +import software.amazon.opentelemetry.javaagent.providers.exporter.otlp.aws.traces.OtlpAwsSpanExporter; import software.amazon.opentelemetry.javaagent.providers.exporter.otlp.aws.traces.OtlpAwsSpanExporterBuilder; interface OtlpAwsExporterTest { @@ -215,15 +218,51 @@ static class OtlpAwsSpanExporterTest extends AbstractOtlpAwsExporterTest { @BeforeEach @Override void setup() { - when(this.mockExporter.toBuilder()).thenReturn(mockBuilder); - when(this.mockBuilder.setEndpoint(any())).thenReturn(mockBuilder); - when(this.mockBuilder.setMemoryMode(any())).thenReturn(this.mockBuilder); - when(this.mockBuilder.setHeaders(this.headersCaptor.capture())).thenReturn(mockBuilder); - when(this.mockBuilder.build()).thenReturn(this.mockExporter); + lenient().when(this.mockExporter.toBuilder()).thenReturn(mockBuilder); + lenient().when(this.mockBuilder.setEndpoint(any())).thenReturn(mockBuilder); + lenient().when(this.mockBuilder.setMemoryMode(any())).thenReturn(this.mockBuilder); + lenient() + .when(this.mockBuilder.setHeaders(this.headersCaptor.capture())) + .thenReturn(mockBuilder); + lenient().when(this.mockBuilder.build()).thenReturn(this.mockExporter); OtlpAwsExporterTest tester = new MockOtlpAwsSpanExporterWrapper(this.mockExporter); this.init(XRAY_OTLP_ENDPOINT, tester); super.setup(); - when(this.mockExporter.export(any())).thenReturn(CompletableResultCode.ofSuccess()); + lenient().when(this.mockExporter.export(any())).thenReturn(CompletableResultCode.ofSuccess()); + } + + @Test + void testSpanExporterCompressionDefaultsToNone() { + OtlpAwsSpanExporter exporter = + OtlpAwsSpanExporterBuilder.create(this.mockExporter, XRAY_OTLP_ENDPOINT).build(); + assertEquals(CompressionMethod.NONE, exporter.getCompression()); + } + + @Test + void testSpanExporterCompressionCanBeSetToGzip() { + OtlpAwsSpanExporter exporter = + OtlpAwsSpanExporterBuilder.create(this.mockExporter, 
XRAY_OTLP_ENDPOINT) + .setCompression("gzip") + .build(); + assertEquals(CompressionMethod.GZIP, exporter.getCompression()); + } + + @Test + void testSpanExporterCompressionIgnoresCaseForGzip() { + OtlpAwsSpanExporter exporter = + OtlpAwsSpanExporterBuilder.create(this.mockExporter, XRAY_OTLP_ENDPOINT) + .setCompression("GZIP") + .build(); + assertEquals(CompressionMethod.GZIP, exporter.getCompression()); + } + + @Test + void testSpanExporterCompressionDefaultsToNoneForUnknownValue() { + OtlpAwsSpanExporter exporter = + OtlpAwsSpanExporterBuilder.create(this.mockExporter, XRAY_OTLP_ENDPOINT) + .setCompression("unknown") + .build(); + assertEquals(CompressionMethod.NONE, exporter.getCompression()); } private static final class MockOtlpAwsSpanExporterWrapper implements OtlpAwsExporterTest { @@ -252,15 +291,51 @@ static class OtlpAwsLogsExporterTest extends AbstractOtlpAwsExporterTest { @BeforeEach @Override void setup() { - when(this.mockExporter.toBuilder()).thenReturn(mockBuilder); - when(this.mockBuilder.setEndpoint(any())).thenReturn(mockBuilder); - when(this.mockBuilder.setMemoryMode(any())).thenReturn(this.mockBuilder); - when(this.mockBuilder.setHeaders(this.headersCaptor.capture())).thenReturn(mockBuilder); - when(this.mockBuilder.build()).thenReturn(this.mockExporter); + lenient().when(this.mockExporter.toBuilder()).thenReturn(mockBuilder); + lenient().when(this.mockBuilder.setEndpoint(any())).thenReturn(mockBuilder); + lenient().when(this.mockBuilder.setMemoryMode(any())).thenReturn(this.mockBuilder); + lenient() + .when(this.mockBuilder.setHeaders(this.headersCaptor.capture())) + .thenReturn(mockBuilder); + lenient().when(this.mockBuilder.build()).thenReturn(this.mockExporter); OtlpAwsExporterTest mocker = new MockOtlpAwsLogsExporterWrapper(this.mockExporter); this.init(LOGS_OTLP_ENDPOINT, mocker); super.setup(); - when(this.mockExporter.export(any())).thenReturn(CompletableResultCode.ofSuccess()); + 
lenient().when(this.mockExporter.export(any())).thenReturn(CompletableResultCode.ofSuccess()); + } + + @Test + void testLogsExporterCompressionDefaultsToNone() { + OtlpAwsLogsExporter exporter = + OtlpAwsLogsExporterBuilder.create(this.mockExporter, LOGS_OTLP_ENDPOINT).build(); + assertEquals(CompressionMethod.NONE, exporter.getCompression()); + } + + @Test + void testLogsExporterCompressionCanBeSetToGzip() { + OtlpAwsLogsExporter exporter = + OtlpAwsLogsExporterBuilder.create(this.mockExporter, LOGS_OTLP_ENDPOINT) + .setCompression("gzip") + .build(); + assertEquals(CompressionMethod.GZIP, exporter.getCompression()); + } + + @Test + void testLogsExporterCompressionIgnoresCaseForGzip() { + OtlpAwsLogsExporter exporter = + OtlpAwsLogsExporterBuilder.create(this.mockExporter, LOGS_OTLP_ENDPOINT) + .setCompression("GZIP") + .build(); + assertEquals(CompressionMethod.GZIP, exporter.getCompression()); + } + + @Test + void testLogsExporterCompressionDefaultsToNoneForUnknownValue() { + OtlpAwsLogsExporter exporter = + OtlpAwsLogsExporterBuilder.create(this.mockExporter, LOGS_OTLP_ENDPOINT) + .setCompression("unknown") + .build(); + assertEquals(CompressionMethod.NONE, exporter.getCompression()); } private static final class MockOtlpAwsLogsExporterWrapper implements OtlpAwsExporterTest { From 2d6a5211daa1afcf0b0835836bde5aeabae4b8f9 Mon Sep 17 00:00:00 2001 From: Jonathan Lee <107072447+jj22ee@users.noreply.github.com> Date: Fri, 1 Aug 2025 19:44:28 -0700 Subject: [PATCH 02/83] [AppSignal E2E Testing] Validate E2E Tests Are Accounted For (#1126) *Issue #, if available:* *Description of changes:* Add new validation workflow: - This validation is to ensure that all ApplicationSignals e2e test workflows relevant to this repo are actually being used in this repo. 
- See: https://github.com/aws-observability/aws-application-signals-test-framework/blob/main/.github/workflows/validate-e2e-tests-are-accounted-for.yml *Testing:* image By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- .../workflows/application-signals-e2e-test.yml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/.github/workflows/application-signals-e2e-test.yml b/.github/workflows/application-signals-e2e-test.yml index d3ca04b77d..8d1bad5981 100644 --- a/.github/workflows/application-signals-e2e-test.yml +++ b/.github/workflows/application-signals-e2e-test.yml @@ -245,3 +245,19 @@ jobs: with: aws-region: us-east-1 caller-workflow-name: 'main-build' + + # This validation is to ensure that all test workflows relevant to this repo are actually + # being used in this repo, which is referring to all the other jobs in this file. + # + # If this starts failing, then it most likely means that new e2e test workflow was + # added to `aws-observability/aws-application-signals-test-framework`, but was not + # added to this file. It could also mean that a test in this file has been removed. 
+ # + # If a particular test file is intended to not be tested in this repo and should not + # be failing this particular validation, then choose one of the following options: + # - Add the test file to the exclusions input (CSV format) to the workflow + # (see: https://github.com/aws-observability/aws-application-signals-test-framework/blob/main/.github/workflows/validate-e2e-tests-are-accounted-for.yml#L1) + # - Update the `validate-e2e-tests-are-accounted-for` job to change which "workflow files are expected to be used by this repo" + # (see: https://github.com/aws-observability/aws-application-signals-test-framework/blob/main/.github/workflows/validate-e2e-tests-are-accounted-for.yml) + validate-all-tests-are-accounted-for: + uses: aws-observability/aws-application-signals-test-framework/.github/workflows/validate-e2e-tests-are-accounted-for.yml@main From 15a4e5220047aca9d9e7fc9f186245e2453821d6 Mon Sep 17 00:00:00 2001 From: Eric Zhang Date: Wed, 6 Aug 2025 16:02:09 -0700 Subject: [PATCH 03/83] Send main build metrics (#1127) *Issue #, if available:* *Description of changes:* Emit a failure metric if main build fails. Since this workflow is triggered with pushes to main or a release branch, we want to be notified if there is a failure with the build process or e2e tests. Tested by temporarily adding an on: push: trigger to my own branch in this repo and testing the updated workflow. Verified that failure metric was successfully published to cloudwatch. https://github.com/aws-observability/aws-otel-java-instrumentation/actions/runs/16783145872 By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .../e2e-tests-app-with-java-agent.yml | 30 ++++++++--------- .github/workflows/main-build.yml | 32 +++++++++++-------- 2 files changed, 34 insertions(+), 28 deletions(-) diff --git a/.github/workflows/e2e-tests-app-with-java-agent.yml b/.github/workflows/e2e-tests-app-with-java-agent.yml index b2c4d744bf..d09283cb8f 100644 --- a/.github/workflows/e2e-tests-app-with-java-agent.yml +++ b/.github/workflows/e2e-tests-app-with-java-agent.yml @@ -167,18 +167,18 @@ jobs: VALIDATOR_COMMAND: -c spark-otel-trace-metric-validation.yml --endpoint http://app:4567 --metric-namespace aws-otel-integ-test -t ${{ github.run_id }}-${{ github.run_number }} # publish status - publish-build-status: - needs: [ test_Spring_App_With_Java_Agent, test_Spark_App_With_Java_Agent, test_Spark_AWS_SDK_V1_App_With_Java_Agent ] - if: ${{ always() }} - uses: ./.github/workflows/publish-status.yml - with: - namespace: 'ADOT/GitHubActions' - repository: ${{ github.repository }} - branch: ${{ github.ref_name }} - workflow: ${{ inputs.caller-workflow-name }} - success: ${{ needs.test_Spring_App_With_Java_Agent.result == 'success' && - needs.test_Spark_App_With_Java_Agent.result == 'success' && - needs.test_Spark_AWS_SDK_V1_App_With_Java_Agent.result == 'success' }} - region: us-east-1 - secrets: - roleArn: ${{ secrets.METRICS_ROLE_ARN }} + # publish-build-status: + # needs: [ test_Spring_App_With_Java_Agent, test_Spark_App_With_Java_Agent, test_Spark_AWS_SDK_V1_App_With_Java_Agent ] + # if: ${{ always() }} + # uses: ./.github/workflows/publish-status.yml + # with: + # namespace: 'ADOT/GitHubActions' + # repository: ${{ github.repository }} + # branch: ${{ github.ref_name }} + # workflow: ${{ inputs.caller-workflow-name }} + # success: ${{ needs.test_Spring_App_With_Java_Agent.result == 'success' && + # needs.test_Spark_App_With_Java_Agent.result == 'success' && + # needs.test_Spark_AWS_SDK_V1_App_With_Java_Agent.result == 'success' }} + # region: us-east-1 + # secrets: + # roleArn: ${{ 
secrets.METRICS_ROLE_ARN }} diff --git a/.github/workflows/main-build.yml b/.github/workflows/main-build.yml index a268380681..c012c3aa34 100644 --- a/.github/workflows/main-build.yml +++ b/.github/workflows/main-build.yml @@ -263,16 +263,22 @@ jobs: adot-image-name: ${{ needs.build.outputs.staging-image }} publish-build-status: - needs: [ build, contract-tests ] - if: ${{ always() }} - uses: ./.github/workflows/publish-status.yml - with: - namespace: 'ADOT/GitHubActions' - repository: ${{ github.repository }} - branch: ${{ github.ref_name }} - workflow: main-build - success: ${{ needs.build.result == 'success' && - needs.contract-tests.result == 'success' }} - region: us-east-1 - secrets: - roleArn: ${{ secrets.METRICS_ROLE_ARN }} + name: "Publish Main Build Status" + needs: [ build, e2e-test, contract-tests, application-signals-lambda-layer-build, application-signals-e2e-test ] + runs-on: ubuntu-latest + if: always() + steps: + - name: Configure AWS Credentials for emitting metrics + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ secrets.METRICS_ROLE_ARN }} + aws-region: us-east-1 + + - name: Publish main build status + run: | + value="${{ needs.build.result == 'success' && needs.e2e-test.result == 'success' && needs.contract-tests.result == 'success' \ + && needs.application-signals-lambda-layer-build.result == 'success' && \needs.application-signals-e2e-test.result == 'success' && '0.0' || '1.0'}}" + aws cloudwatch put-metric-data --namespace 'ADOT/GitHubActions' \ + --metric-name Failure \ + --dimensions repository=${{ github.repository }},branch=${{ github.ref_name }},workflow=main_build \ + --value $value From 12970e01c47c8e374568e0a994c21d7d2094de2d Mon Sep 17 00:00:00 2001 From: Prashant Srivastava <50466688+srprash@users.noreply.github.com> Date: Fri, 8 Aug 2025 14:43:38 -0400 Subject: [PATCH 04/83] fix typo in main build (#1130) Removing problematic backslash which was making the workflow invalid: 
https://github.com/aws-observability/aws-otel-java-instrumentation/actions/runs/16834776574/workflow By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- .github/workflows/main-build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main-build.yml b/.github/workflows/main-build.yml index c012c3aa34..17461f822e 100644 --- a/.github/workflows/main-build.yml +++ b/.github/workflows/main-build.yml @@ -277,7 +277,7 @@ jobs: - name: Publish main build status run: | value="${{ needs.build.result == 'success' && needs.e2e-test.result == 'success' && needs.contract-tests.result == 'success' \ - && needs.application-signals-lambda-layer-build.result == 'success' && \needs.application-signals-e2e-test.result == 'success' && '0.0' || '1.0'}}" + && needs.application-signals-lambda-layer-build.result == 'success' && needs.application-signals-e2e-test.result == 'success' && '0.0' || '1.0'}}" aws cloudwatch put-metric-data --namespace 'ADOT/GitHubActions' \ --metric-name Failure \ --dimensions repository=${{ github.repository }},branch=${{ github.ref_name }},workflow=main_build \ From e9cc142c85c66e0026f3143c0769f55746acc315 Mon Sep 17 00:00:00 2001 From: Jeel-mehta <72543735+Jeel-mehta@users.noreply.github.com> Date: Mon, 11 Aug 2025 09:45:24 -0700 Subject: [PATCH 05/83] Fix YAML syntax error in main-build workflow (#1132) *Description of changes:* Removing problematic backslash which was making the workflow invalid: https://github.com/aws-observability/aws-otel-java-instrumentation/actions/runs/16840663861 Fixed in my branch release/v2.11.2 - https://github.com/aws-observability/aws-otel-java-instrumentation/actions/runs/16841343395 Reciprocating the same change in main By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
Co-authored-by: Jeel Mehta --- .github/workflows/main-build.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/main-build.yml b/.github/workflows/main-build.yml index 17461f822e..4a8a4f42cb 100644 --- a/.github/workflows/main-build.yml +++ b/.github/workflows/main-build.yml @@ -276,8 +276,7 @@ jobs: - name: Publish main build status run: | - value="${{ needs.build.result == 'success' && needs.e2e-test.result == 'success' && needs.contract-tests.result == 'success' \ - && needs.application-signals-lambda-layer-build.result == 'success' && needs.application-signals-e2e-test.result == 'success' && '0.0' || '1.0'}}" + value="${{ needs.build.result == 'success' && needs.e2e-test.result == 'success' && needs.contract-tests.result == 'success' && needs.application-signals-lambda-layer-build.result == 'success' && needs.application-signals-e2e-test.result == 'success' && '0.0' || '1.0' }}" aws cloudwatch put-metric-data --namespace 'ADOT/GitHubActions' \ --metric-name Failure \ --dimensions repository=${{ github.repository }},branch=${{ github.ref_name }},workflow=main_build \ From 25851ba50d0f5fae8049d0735bc0046f7a3602ae Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Aug 2025 00:14:43 +0000 Subject: [PATCH 06/83] Bump docker/build-push-action from 5 to 6 (#929) --- .github/workflows/docker-build-corretto-slim.yml | 2 +- .github/workflows/patch-release-build.yml | 4 ++-- .github/workflows/pr-build.yml | 2 +- .github/workflows/release-build.yml | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/docker-build-corretto-slim.yml b/.github/workflows/docker-build-corretto-slim.yml index ca7528e241..5265309aae 100644 --- a/.github/workflows/docker-build-corretto-slim.yml +++ b/.github/workflows/docker-build-corretto-slim.yml @@ -36,7 +36,7 @@ jobs: - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - name: Build docker image 
- uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: push: true context: scripts/docker/corretto-slim diff --git a/.github/workflows/patch-release-build.yml b/.github/workflows/patch-release-build.yml index 4cbc3965fa..5e80b52398 100644 --- a/.github/workflows/patch-release-build.yml +++ b/.github/workflows/patch-release-build.yml @@ -114,7 +114,7 @@ jobs: driver-opts: image=moby/buildkit:v0.15.1 - name: Build image for testing - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: push: false build-args: "ADOT_JAVA_VERSION=${{ github.event.inputs.version }}" @@ -128,7 +128,7 @@ jobs: run: .github/scripts/test-adot-javaagent-image.sh "${{ env.TEST_TAG }}" "${{ github.event.inputs.version }}" - name: Build and push image - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: push: true build-args: "ADOT_JAVA_VERSION=${{ github.event.inputs.version }}" diff --git a/.github/workflows/pr-build.yml b/.github/workflows/pr-build.yml index fd2d7cf2ae..1c4e5f9810 100644 --- a/.github/workflows/pr-build.yml +++ b/.github/workflows/pr-build.yml @@ -131,7 +131,7 @@ jobs: if: ${{ matrix.os == 'ubuntu-latest' }} - name: Build image for testing - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 if: ${{ matrix.os == 'ubuntu-latest' }} with: push: false diff --git a/.github/workflows/release-build.yml b/.github/workflows/release-build.yml index ce9d29ddc8..372a796889 100644 --- a/.github/workflows/release-build.yml +++ b/.github/workflows/release-build.yml @@ -84,7 +84,7 @@ jobs: driver-opts: image=moby/buildkit:v0.15.1 - name: Build image for testing - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: push: false build-args: "ADOT_JAVA_VERSION=${{ github.event.inputs.version }}" @@ -98,7 +98,7 @@ jobs: run: .github/scripts/test-adot-javaagent-image.sh "${{ env.TEST_TAG }}" "${{ github.event.inputs.version }}" - name: Build and push image - uses: 
docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: push: true build-args: "ADOT_JAVA_VERSION=${{ github.event.inputs.version }}" From bb2a092884613886f3dde27ff8d643bc5cc28f29 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Aug 2025 15:36:18 +0000 Subject: [PATCH 07/83] Bump actions/download-artifact from 4 to 5 (#1136) --- .github/workflows/application-signals-e2e-test.yml | 2 +- .github/workflows/release-lambda.yml | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/application-signals-e2e-test.yml b/.github/workflows/application-signals-e2e-test.yml index 8d1bad5981..cb5abc2d66 100644 --- a/.github/workflows/application-signals-e2e-test.yml +++ b/.github/workflows/application-signals-e2e-test.yml @@ -31,7 +31,7 @@ jobs: role-to-assume: arn:aws:iam::${{ secrets.APPLICATION_SIGNALS_E2E_TEST_ACCOUNT_ID }}:role/${{ secrets.APPLICATION_SIGNALS_E2E_TEST_ROLE_NAME }} aws-region: us-east-1 - - uses: actions/download-artifact@v4 + - uses: actions/download-artifact@v5 with: name: aws-opentelemetry-agent.jar diff --git a/.github/workflows/release-lambda.yml b/.github/workflows/release-lambda.yml index 3257392a64..8390369a7c 100644 --- a/.github/workflows/release-lambda.yml +++ b/.github/workflows/release-lambda.yml @@ -98,7 +98,7 @@ jobs: echo BUCKET_NAME=java-lambda-layer-${{ github.run_id }}-${{ matrix.aws_region }} | tee --append $GITHUB_ENV - name: download layer.zip - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v5 with: name: aws-opentelemetry-java-layer.zip @@ -157,7 +157,7 @@ jobs: uses: actions/checkout@v4 - uses: hashicorp/setup-terraform@v2 - name: download layerARNs - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v5 with: pattern: ${{ env.LAYER_NAME }}-* path: ${{ env.LAYER_NAME }} @@ -206,7 +206,7 @@ jobs: echo "}" >> ../layer_cdk cat ../layer_cdk - name: download layer.zip - uses: 
actions/download-artifact@v4 + uses: actions/download-artifact@v5 with: name: layer.zip - name: Get commit hash From e7fc9799083feace813a750b449035db642dc58b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Aug 2025 16:32:57 +0000 Subject: [PATCH 08/83] Bump org.apache.tomcat.embed:tomcat-embed-core from 10.1.10 to 11.0.6 (#1072) --- appsignals-tests/images/http-servers/tomcat/build.gradle.kts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/appsignals-tests/images/http-servers/tomcat/build.gradle.kts b/appsignals-tests/images/http-servers/tomcat/build.gradle.kts index a9456b57ce..c770ec4bb4 100644 --- a/appsignals-tests/images/http-servers/tomcat/build.gradle.kts +++ b/appsignals-tests/images/http-servers/tomcat/build.gradle.kts @@ -31,7 +31,7 @@ application { } dependencies { - implementation("org.apache.tomcat.embed:tomcat-embed-core:10.1.10") + implementation("org.apache.tomcat.embed:tomcat-embed-core:11.0.6") implementation("io.opentelemetry:opentelemetry-api") } From 0a87c6cf5edfbea0b3d240ea922d5a7bb17a0561 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Aug 2025 19:08:29 +0000 Subject: [PATCH 09/83] Bump actions/setup-java from 3 to 4 (#1138) --- .github/workflows/release-udp-exporter.yml | 2 +- .github/workflows/udp-exporter-e2e-test.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release-udp-exporter.yml b/.github/workflows/release-udp-exporter.yml index e200a7c3a9..4ec892e421 100644 --- a/.github/workflows/release-udp-exporter.yml +++ b/.github/workflows/release-udp-exporter.yml @@ -29,7 +29,7 @@ jobs: uses: actions/checkout@v4 - name: Set up Java - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: java-version: '17' distribution: 'temurin' diff --git a/.github/workflows/udp-exporter-e2e-test.yml b/.github/workflows/udp-exporter-e2e-test.yml index 
ead8a1f953..bbda252e70 100644 --- a/.github/workflows/udp-exporter-e2e-test.yml +++ b/.github/workflows/udp-exporter-e2e-test.yml @@ -16,7 +16,7 @@ jobs: uses: actions/checkout@v4 - name: Set up Java - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: java-version: '17' distribution: 'temurin' From 7ffb3d4f9200b10f7701926ff240dd5c0b36d136 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Aug 2025 22:54:15 +0000 Subject: [PATCH 10/83] Bump burrunan/gradle-cache-action from 1 to 2 (#931) --- .github/workflows/soak-testing.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/soak-testing.yml b/.github/workflows/soak-testing.yml index c007d3fb4e..cc3e742a6a 100644 --- a/.github/workflows/soak-testing.yml +++ b/.github/workflows/soak-testing.yml @@ -110,7 +110,7 @@ jobs: aws ecr-public get-login-password | docker login --username AWS --password-stdin public.ecr.aws - name: Build Sample App locally directly to the Docker daemon - uses: burrunan/gradle-cache-action@v1 + uses: burrunan/gradle-cache-action@v2 with: arguments: jibDockerBuild env: From aecbf9d61797d370dd13b11e5b6658a88b196eb4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Aug 2025 23:54:47 +0000 Subject: [PATCH 11/83] Bump codecov/codecov-action from 3 to 5 (#954) --- .github/workflows/e2e-tests-app-with-java-agent.yml | 2 +- .github/workflows/pr-build.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/e2e-tests-app-with-java-agent.yml b/.github/workflows/e2e-tests-app-with-java-agent.yml index d09283cb8f..2e3da3c440 100644 --- a/.github/workflows/e2e-tests-app-with-java-agent.yml +++ b/.github/workflows/e2e-tests-app-with-java-agent.yml @@ -71,7 +71,7 @@ jobs: env: COMMIT_HASH: ${{ inputs.image_tag }} - - uses: codecov/codecov-action@v3 + - uses: codecov/codecov-action@v5 
test_Spring_App_With_Java_Agent: name: Test Spring App with AWS OTel Java agent diff --git a/.github/workflows/pr-build.yml b/.github/workflows/pr-build.yml index 1c4e5f9810..4e24101630 100644 --- a/.github/workflows/pr-build.yml +++ b/.github/workflows/pr-build.yml @@ -158,7 +158,7 @@ jobs: if: ${{ matrix.os != 'ubuntu-latest' && (hashFiles('.github/patches/opentelemetry-java*.patch') == '' || matrix.os != 'windows-latest' ) }} # build on windows as well unless a patch exists with: arguments: build --stacktrace -PenableCoverage=true - - uses: codecov/codecov-action@v3 + - uses: codecov/codecov-action@v5 build-lambda: runs-on: ubuntu-latest From 848353c8a5fc5f8ac792118d887e81b3e774996d Mon Sep 17 00:00:00 2001 From: Jeel-mehta <72543735+Jeel-mehta@users.noreply.github.com> Date: Tue, 12 Aug 2025 20:20:57 -0700 Subject: [PATCH 12/83] Update image scan to point to 2.11.2 release (#1142) *Description of changes:* Update owasp.yml to scan 2.11.2 release. By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
Co-authored-by: Jeel Mehta --- .github/workflows/owasp.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/owasp.yml b/.github/workflows/owasp.yml index 54ce812326..cc6e36489b 100644 --- a/.github/workflows/owasp.yml +++ b/.github/workflows/owasp.yml @@ -97,7 +97,7 @@ jobs: id: high_scan_v2 uses: ./.github/actions/image_scan with: - image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.1" + image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.2" severity: 'CRITICAL,HIGH' - name: Perform low image scan on v2 @@ -105,7 +105,7 @@ jobs: id: low_scan_v2 uses: ./.github/actions/image_scan with: - image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.1" + image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.2" severity: 'MEDIUM,LOW,UNKNOWN' - name: Configure AWS Credentials for emitting metrics From c5ed80f2db3bf28e7282f34ce81ff0cf647af3eb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Aug 2025 07:53:57 -0700 Subject: [PATCH 13/83] Bump actions/checkout from 4 to 5 (#1143) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/checkout](https://github.com/actions/checkout) from 4 to 5.
Release notes

Sourced from actions/checkout's releases.

v5.0.0

What's Changed

⚠️ Minimum Compatible Runner Version

v2.327.1
Release Notes

Make sure your runner is updated to this version or newer to use this release.

Full Changelog: https://github.com/actions/checkout/compare/v4...v5.0.0

v4.3.0

What's Changed

New Contributors

Full Changelog: https://github.com/actions/checkout/compare/v4...v4.3.0

v4.2.2

What's Changed

Full Changelog: https://github.com/actions/checkout/compare/v4.2.1...v4.2.2

v4.2.1

What's Changed

New Contributors

Full Changelog: https://github.com/actions/checkout/compare/v4.2.0...v4.2.1

... (truncated)

Changelog

Sourced from actions/checkout's changelog.

Changelog

V5.0.0

V4.3.0

v4.2.2

v4.2.1

v4.2.0

v4.1.7

v4.1.6

v4.1.5

v4.1.4

v4.1.3

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/checkout&package-manager=github_actions&previous-version=4&new-version=5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/codeql-analysis.yml | 2 +- .github/workflows/docker-build-corretto-slim.yml | 2 +- .../docker-build-smoke-tests-fake-backend.yml | 2 +- .github/workflows/e2e-tests-app-with-java-agent.yml | 8 ++++---- .github/workflows/e2e-tests-with-operator.yml | 10 +++++----- .github/workflows/main-build.yml | 8 ++++---- .github/workflows/nightly-upstream-snapshot-build.yml | 4 ++-- .github/workflows/owasp.yml | 2 +- .github/workflows/patch-release-build.yml | 6 +++--- .github/workflows/pr-build.yml | 6 +++--- .github/workflows/release-build.yml | 2 +- .github/workflows/release-lambda.yml | 4 ++-- .github/workflows/release-udp-exporter.yml | 2 +- .github/workflows/soak-testing.yml | 2 +- .github/workflows/udp-exporter-e2e-test.yml | 2 +- 15 files changed, 31 insertions(+), 31 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 024cef746b..cea96c8cbe 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -23,7 +23,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v5 # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL diff --git a/.github/workflows/docker-build-corretto-slim.yml b/.github/workflows/docker-build-corretto-slim.yml index 5265309aae..cab6cc17ac 100644 --- a/.github/workflows/docker-build-corretto-slim.yml +++ b/.github/workflows/docker-build-corretto-slim.yml @@ -19,7 +19,7 @@ jobs: build-corretto: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: gradle/wrapper-validation-action@v1 - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 diff --git a/.github/workflows/docker-build-smoke-tests-fake-backend.yml b/.github/workflows/docker-build-smoke-tests-fake-backend.yml index 7bcb06a6fe..a94c5752c7 100644 --- a/.github/workflows/docker-build-smoke-tests-fake-backend.yml +++ b/.github/workflows/docker-build-smoke-tests-fake-backend.yml @@ -20,7 +20,7 @@ jobs: build-docker: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: actions/setup-java@v4 with: java-version: 17 diff --git a/.github/workflows/e2e-tests-app-with-java-agent.yml b/.github/workflows/e2e-tests-app-with-java-agent.yml index 2e3da3c440..f2fe9df20c 100644 --- a/.github/workflows/e2e-tests-app-with-java-agent.yml +++ b/.github/workflows/e2e-tests-app-with-java-agent.yml @@ -25,7 +25,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Java Instrumentation repository - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 @@ -79,7 +79,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: actions/setup-java@v4 with: @@ -110,7 +110,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: actions/setup-java@v4 with: @@ -141,7 +141,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: actions/setup-java@v4 with: diff --git a/.github/workflows/e2e-tests-with-operator.yml 
b/.github/workflows/e2e-tests-with-operator.yml index ffacf0c74c..ab207d1a6d 100644 --- a/.github/workflows/e2e-tests-with-operator.yml +++ b/.github/workflows/e2e-tests-with-operator.yml @@ -34,7 +34,7 @@ jobs: build-sample-app: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 @@ -84,14 +84,14 @@ jobs: test-case-batch-value: ${{ steps.set-batches.outputs.batch-values }} steps: - name: Checkout Testing Framework repository - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: repository: ${{ env.TESTING_FRAMEWORK_REPO }} path: testing-framework ref: ${{ inputs.test_ref }} - name: Checkout Java Instrumentation repository - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 path: aws-otel-java-instrumentation @@ -126,7 +126,7 @@ jobs: steps: # required for versioning - name: Checkout Java Instrumentation repository - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 path: aws-otel-java-instrumentation @@ -151,7 +151,7 @@ jobs: role-duration-seconds: 14400 - name: Checkout Testing Framework repository - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: repository: ${{ env.TESTING_FRAMEWORK_REPO }} path: testing-framework diff --git a/.github/workflows/main-build.yml b/.github/workflows/main-build.yml index 4a8a4f42cb..c5e3f2f020 100644 --- a/.github/workflows/main-build.yml +++ b/.github/workflows/main-build.yml @@ -22,7 +22,7 @@ jobs: name: Test patches applied to dependencies runs-on: aws-otel-java-instrumentation_ubuntu-latest_32-core steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: actions/setup-java@v4 with: java-version: 17 @@ -54,7 +54,7 @@ jobs: staging_registry: ${{ steps.imageOutput.outputs.stagingRegistry }} staging_repository: ${{ steps.imageOutput.outputs.stagingRepository }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 - uses: actions/setup-java@v4 @@ -189,7 
+189,7 @@ jobs: runs-on: ubuntu-latest needs: build steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 - uses: actions/setup-java@v4 @@ -229,7 +229,7 @@ jobs: application-signals-lambda-layer-build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 - uses: actions/setup-java@v4 diff --git a/.github/workflows/nightly-upstream-snapshot-build.yml b/.github/workflows/nightly-upstream-snapshot-build.yml index c97db6704c..cd9ccf0da6 100644 --- a/.github/workflows/nightly-upstream-snapshot-build.yml +++ b/.github/workflows/nightly-upstream-snapshot-build.yml @@ -23,7 +23,7 @@ jobs: image_name: ${{ steps.imageOutput.outputs.imageName }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 @@ -129,7 +129,7 @@ jobs: runs-on: ubuntu-latest needs: build steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 - uses: actions/setup-java@v4 diff --git a/.github/workflows/owasp.yml b/.github/workflows/owasp.yml index cc6e36489b..7be0a989a0 100644 --- a/.github/workflows/owasp.yml +++ b/.github/workflows/owasp.yml @@ -24,7 +24,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repo for dependency scan - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 diff --git a/.github/workflows/patch-release-build.yml b/.github/workflows/patch-release-build.yml index 5e80b52398..725661e5aa 100644 --- a/.github/workflows/patch-release-build.yml +++ b/.github/workflows/patch-release-build.yml @@ -37,14 +37,14 @@ jobs: name: Check out release branch # Will fail if there is no release branch yet or succeed otherwise continue-on-error: true - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: ref: ${{ steps.parse-release-branch.outputs.release-branch-name }} - id: checkout-release-tag name: Check out release tag # If there is already a release branch, the previous step succeeds and we don't run 
this or the next one. if: ${{ steps.checkout-release-branch.outcome == 'failure' }} - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: ref: ${{ steps.parse-release-branch.outputs.release-tag-name }} - name: Create release branch @@ -57,7 +57,7 @@ jobs: needs: prepare-release-branch steps: - name: Checkout release branch - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: ref: ${{ needs.prepare-release-branch.outputs.release-branch-name }} diff --git a/.github/workflows/pr-build.yml b/.github/workflows/pr-build.yml index 4e24101630..85215e88e1 100644 --- a/.github/workflows/pr-build.yml +++ b/.github/workflows/pr-build.yml @@ -12,7 +12,7 @@ jobs: name: Test patches applied to dependencies runs-on: aws-otel-java-instrumentation_ubuntu-latest_32-core steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: actions/setup-java@v4 with: @@ -54,7 +54,7 @@ jobs: # https://github.com/open-telemetry/opentelemetry-java/issues/4560 - os: ${{ startsWith(github.event.pull_request.base.ref, 'release/v') && 'windows-latest' || '' }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: actions/setup-java@v4 with: @@ -164,7 +164,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Setup Java uses: actions/setup-java@v4 diff --git a/.github/workflows/release-build.yml b/.github/workflows/release-build.yml index 372a796889..0221866cf4 100644 --- a/.github/workflows/release-build.yml +++ b/.github/workflows/release-build.yml @@ -24,7 +24,7 @@ jobs: environment: Release runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: actions/setup-java@v4 with: java-version: 17 diff --git a/.github/workflows/release-lambda.yml b/.github/workflows/release-lambda.yml index 8390369a7c..1d2068e830 100644 --- a/.github/workflows/release-lambda.yml +++ b/.github/workflows/release-lambda.yml @@ -40,7 
+40,7 @@ jobs: echo "aws_regions_json=${MATRIX}" >> $GITHUB_OUTPUT - name: Checkout Repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 + uses: actions/checkout@v5 - uses: actions/setup-java@v4 with: @@ -154,7 +154,7 @@ jobs: needs: publish-prod steps: - name: Checkout Repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 + uses: actions/checkout@v5 - uses: hashicorp/setup-terraform@v2 - name: download layerARNs uses: actions/download-artifact@v5 diff --git a/.github/workflows/release-udp-exporter.yml b/.github/workflows/release-udp-exporter.yml index 4ec892e421..8d39e7308c 100644 --- a/.github/workflows/release-udp-exporter.yml +++ b/.github/workflows/release-udp-exporter.yml @@ -26,7 +26,7 @@ jobs: needs: validate-udp-exporter-e2e-test steps: - name: Checkout Repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Set up Java uses: actions/setup-java@v4 diff --git a/.github/workflows/soak-testing.yml b/.github/workflows/soak-testing.yml index cc3e742a6a..f7c1ec8175 100644 --- a/.github/workflows/soak-testing.yml +++ b/.github/workflows/soak-testing.yml @@ -63,7 +63,7 @@ jobs: run: | echo "TEST_DURATION_MINUTES=${{ github.event.inputs.test_duration_minutes || env.DEFAULT_TEST_DURATION_MINUTES }}" | tee --append $GITHUB_ENV; - name: Clone This Repo @ ${{ env.TARGET_SHA }} - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: ref: ${{ env.TARGET_SHA }} diff --git a/.github/workflows/udp-exporter-e2e-test.yml b/.github/workflows/udp-exporter-e2e-test.yml index bbda252e70..33ad3d756a 100644 --- a/.github/workflows/udp-exporter-e2e-test.yml +++ b/.github/workflows/udp-exporter-e2e-test.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Set up Java uses: actions/setup-java@v4 From b091e6a88634b8f88f97f3193d82239178b48135 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Aug 2025 15:53:32 +0000 Subject: [PATCH 14/83] Bump uuid from 1.5.0 to 1.16.0 in /tools/cp-utility (#1055) --- tools/cp-utility/Cargo.lock | 106 ++++++++++++++++++++++++++++++------ tools/cp-utility/Cargo.toml | 2 +- 2 files changed, 91 insertions(+), 17 deletions(-) diff --git a/tools/cp-utility/Cargo.lock b/tools/cp-utility/Cargo.lock index a1aa5dd68a..9e9036e893 100644 --- a/tools/cp-utility/Cargo.lock +++ b/tools/cp-utility/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "bitflags" @@ -46,20 +46,21 @@ checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" [[package]] name = "getrandom" -version = "0.2.8" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" +checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" dependencies = [ "cfg-if", "libc", + "r-efi", "wasi", ] [[package]] name = "libc" -version = "0.2.151" +version = "0.2.171" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" +checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6" [[package]] name = "linux-raw-sys" @@ -73,22 +74,46 @@ version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +[[package]] +name = "proc-macro2" +version = "1.0.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" + [[package]] name = "rand" -version = "0.8.5" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94" dependencies = [ - "libc", "rand_chacha", "rand_core", + "zerocopy", ] [[package]] name = "rand_chacha" -version = "0.3.1" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" dependencies = [ "ppv-lite86", "rand_core", @@ -96,9 +121,9 @@ dependencies = [ [[package]] name = "rand_core" -version = "0.6.4" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" dependencies = [ "getrandom", ] @@ -125,6 +150,17 @@ dependencies = [ "windows-sys", ] +[[package]] +name = "syn" +version = "2.0.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + [[package]] name = "tempfile" version = "3.9.0" @@ -138,11 +174,17 @@ dependencies = [ "windows-sys", ] +[[package]] +name = "unicode-ident" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" + [[package]] 
name = "uuid" -version = "1.5.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88ad59a7560b41a70d191093a945f0b87bc1deeda46fb237479708a1d6b6cdfc" +checksum = "458f7a779bf54acc9f347480ac654f68407d3aab21269a6e3c9f922acd9e2da9" dependencies = [ "getrandom", "rand", @@ -150,9 +192,12 @@ dependencies = [ [[package]] name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +version = "0.14.2+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +dependencies = [ + "wit-bindgen-rt", +] [[package]] name = "windows-sys" @@ -219,3 +264,32 @@ name = "windows_x86_64_msvc" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" + +[[package]] +name = "wit-bindgen-rt" +version = "0.39.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" +dependencies = [ + "bitflags 2.4.1", +] + +[[package]] +name = "zerocopy" +version = "0.8.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd97444d05a4328b90e75e503a34bad781f14e28a823ad3557f0750df1ebcbc6" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6352c01d0edd5db859a63e2605f4ea3183ddbd15e2c4a9e7d32184df75e4f154" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] diff --git a/tools/cp-utility/Cargo.toml b/tools/cp-utility/Cargo.toml index 7169660e4d..fdd8bff094 100644 --- a/tools/cp-utility/Cargo.toml +++ b/tools/cp-utility/Cargo.toml @@ -11,7 +11,7 @@ edition = "2021" [dev-dependencies] # dependencies only used during tests tempfile = "3.9.0" 
-uuid = { version = "1.5.0", features = ["v4", "fast-rng"] } +uuid = { version = "1.16.0", features = ["v4", "fast-rng"] } [profile.release] # Levers to optimize the binary for size From 0dcd07479164a705e0b407cda88f531c758d5e2f Mon Sep 17 00:00:00 2001 From: Steve Liu Date: Wed, 13 Aug 2025 10:48:04 -0700 Subject: [PATCH 15/83] Change scheduled daily scan to run 3 times per day (#1140) *Description of changes:* Daily Scan failure metrics often show up for one day and are back to normal 24 hours later. The workflow error for these failures is always some timeout/transient issue that goes away if the workflow is manually re-run. We want to try to avoid alarming on these failures while catching actual, repeated failures. Moving the cadence of the daily scan to run 3 times per day with 12-6-6 hour intervals: 02:00, 14:00, 20:00 UTC times. By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- .github/workflows/owasp.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/owasp.yml b/.github/workflows/owasp.yml index 7be0a989a0..0f8559b6a1 100644 --- a/.github/workflows/owasp.yml +++ b/.github/workflows/owasp.yml @@ -8,8 +8,10 @@ name: Daily scan on: - schedule: - - cron: '0 18 * * *' # scheduled to run at 18:00 UTC every day + schedule: # scheduled to run at 14:00, 20:00, 02:00 UTC every day + - cron: '0 14 * * *' # 6:00/7:00 PST/PDT (14:00 UTC) + - cron: '0 20 * * *' # 12:00/13:00 PST/PDT (20:00 UTC) + - cron: '0 02 * * *' # 18:00/19:00 PST/PDT (02:00 UTC) workflow_dispatch: # be able to run the workflow on demand env: From 3f34807bc525fc0154a8a755b53a2ac12573e790 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Aug 2025 18:55:02 +0000 Subject: [PATCH 16/83] Bump aws-actions/aws-secretsmanager-get-secrets from 1 to 2 (#930) --- .github/workflows/owasp.yml | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/.github/workflows/owasp.yml b/.github/workflows/owasp.yml index 0f8559b6a1..f0e7078959 100644 --- a/.github/workflows/owasp.yml +++ b/.github/workflows/owasp.yml @@ -43,7 +43,7 @@ jobs: aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Get NVD API key for dependency scan - uses: aws-actions/aws-secretsmanager-get-secrets@v1 + uses: aws-actions/aws-secretsmanager-get-secrets@v2 id: nvd_api_key with: secret-ids: ${{ secrets.NVD_API_KEY_SECRET_ARN }} From d471c70bd9d823dde7d2d0b767a093718b0d17fe Mon Sep 17 00:00:00 2001 From: Steve Liu Date: Wed, 13 Aug 2025 13:03:34 -0700 Subject: [PATCH 17/83] Add docker login for daily scan workflow image scanning (#1144) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit *Description of changes:* The current daily scan's image scan workflow would often fail with the following error: ``` 2025-08-12T22:35:36Z INFO [vuln] Vulnerability scanning is enabled 2025-08-12T22:35:36Z INFO [secret] Secret scanning is enabled 2025-08-12T22:35:36Z INFO [secret] If your scanning is slow, please try '--scanners vuln' to disable secret scanning 2025-08-12T22:35:36Z INFO [secret] Please see also https://trivy.dev/v0.64/docs/scanner/secret#recommendation for faster secret detection 📣 Notices: - Version 0.65.0 of Trivy is now available, current version is 0.64.1 To suppress version checks, run Trivy scans with the --skip-version-check flag 2025-08-12T22:35:37Z FATAL Fatal error run error: image scan error: scan error: unable to initialize a scan service: unable to initialize an image scan service: unable to find the specified image "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.1" in ["docker" "containerd" "podman" "remote"]: 4 errors occurred: * docker error: unable to inspect the image (public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.1): Error response from daemon: No such image: 
public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.1 * containerd error: failed to list images from containerd client: connection error: desc = "transport: Error while dialing: dial unix /run/containerd/containerd.sock: connect: permission denied" * podman error: unable to initialize Podman client: no podman socket found: stat /run/user/1001/podman/podman.sock: no such file or directory * remote error: GET https://public.ecr.aws/v2/aws-observability/adot-autoinstrumentation-java/manifests/sha256:7ebd362ec33ad1fa0218535540cec4db3165364fe0715b892e90afdf2374b531: TOOMANYREQUESTS: Rate exceeded ``` Turns out the issue is related to making unauthenticated GET request calls to public ECR images. We make these calls both in the `pr_build` (explanation can be found in the code comment) and in `owasp.yml`. Likely, our GET requests to pull the ADOT image are being throttled as a result. https://github.com/aws-observability/aws-otel-java-instrumentation/blob/7ffb3d4f9200b10f7701926ff240dd5c0b36d136/.github/actions/image_scan/action.yml#L24 - Adding an intermediary step to log-in to ECR before making the GET request calls for `owasp.yml` image scanning. **Testing** 200 Image Scan test runs with docker login (0 failed jobs): https://github.com/aws-observability/aws-otel-java-instrumentation/actions/runs/16922020570/job/47950156083 200 Image Scan test runs w/o docker login (1 failed job, rest didn't run): https://github.com/aws-observability/aws-otel-java-instrumentation/actions/runs/16922512730/job/47951639594 By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .github/actions/image_scan/action.yml | 6 ++++++ .github/workflows/owasp.yml | 15 +++++++++++++++ .github/workflows/pr-build.yml | 1 + 3 files changed, 22 insertions(+) diff --git a/.github/actions/image_scan/action.yml b/.github/actions/image_scan/action.yml index eb19f78609..7a98a5906a 100644 --- a/.github/actions/image_scan/action.yml +++ b/.github/actions/image_scan/action.yml @@ -11,6 +11,11 @@ inputs: severity: required: true description: "List of severities that will cause a failure" + logout: + required: true + description: | + Whether to logout of public AWS ECR. Set to 'true' for PR workflows to avoid potential call failures, + 'false' for daily scans which has a higher bar for passing regularly and specifically wants to sign in. runs: using: "composite" @@ -22,6 +27,7 @@ runs: # ensure we can make unauthenticated call. This is important for making the pr_build workflow run on # PRs created from forked repos. - name: Logout of public AWS ECR + if: inputs.logout == 'true' shell: bash run: docker logout public.ecr.aws diff --git a/.github/workflows/owasp.yml b/.github/workflows/owasp.yml index f0e7078959..f0297648c1 100644 --- a/.github/workflows/owasp.yml +++ b/.github/workflows/owasp.yml @@ -78,6 +78,17 @@ jobs: if: ${{ steps.dep_scan.outcome != 'success' }} run: less dependency-check-report.html + - name: Configure AWS credentials for image scan + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} + + - name: Login to Public ECR + uses: docker/login-action@v3 + with: + registry: public.ecr.aws + - name: Perform high image scan on v1 if: always() id: high_scan_v1 @@ -85,6 +96,7 @@ jobs: with: image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v1.33.0" severity: 'CRITICAL,HIGH' + logout: 'false' - name: Perform low image scan on v1 if: always() @@ -93,6 +105,7 @@ jobs: with: image-ref: 
"public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v1.33.0" severity: 'MEDIUM,LOW,UNKNOWN' + logout: 'false' - name: Perform high image scan on v2 if: always() @@ -101,6 +114,7 @@ jobs: with: image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.2" severity: 'CRITICAL,HIGH' + logout: 'false' - name: Perform low image scan on v2 if: always() @@ -109,6 +123,7 @@ jobs: with: image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.2" severity: 'MEDIUM,LOW,UNKNOWN' + logout: 'false' - name: Configure AWS Credentials for emitting metrics if: always() diff --git a/.github/workflows/pr-build.yml b/.github/workflows/pr-build.yml index 85215e88e1..f068d52b6c 100644 --- a/.github/workflows/pr-build.yml +++ b/.github/workflows/pr-build.yml @@ -147,6 +147,7 @@ jobs: with: image-ref: ${{ env.TEST_TAG }} severity: 'CRITICAL,HIGH,MEDIUM,LOW,UNKNOWN' + logout: 'true' - name: Test docker image if: ${{ matrix.os == 'ubuntu-latest' }} From 25b2cd864dfa22d7ddfb537a88a1e846510f3002 Mon Sep 17 00:00:00 2001 From: Anahat Date: Wed, 13 Aug 2025 15:53:24 -0700 Subject: [PATCH 18/83] Instrumentation Patch Removal and SPI AWS SDK Test Addition (#1120) This is the final PR for the SPI aws-sdk instrumentation. It removes the [opentelemetry-java-instrumentation](https://github.com/aws-observability/aws-otel-java-instrumentation/blob/main/.github/patches/opentelemetry-java-instrumentation.patch) patch and adds comprehensive unit test coverage for AWS experimental attributes in both AWS SDK v1.11 and v2.2 instrumentation packages. The v2.2 package introduces 29 new experimental attributes while v1.11 adds 23 new experimental attributes. All attributes are now tested through unit tests and/or contract tests. ### Description of changes: #### AWS SDK v2.2 (awssdk_v2_2) New attributes being tested: 1. AWS_BUCKET_NAME - testS3ExperimentalAttributes() & contract tests 2. AWS_QUEUE_URL - testSqsExperimentalAttributes() & contract tests 3. 
AWS_QUEUE_NAME - contract tests 4. AWS_STREAM_NAME - testKinesisExperimentalAttributes() & contract tests 5. AWS_STREAM_ARN - testKinesisExperimentalAttributes() & contract tests 6. AWS_TABLE_NAME - testDynamoDbExperimentalAttributes() 7. AWS_GUARDRAIL_ID - contract tests 8. AWS_GUARDRAIL_ARN - contract tests 9. AWS_AGENT_ID - testBedrockAgentExperimentalAttributes() & contract tests 10. AWS_DATA_SOURCE_ID - testBedrockDataSourceExperimentalAttributes() & contract tests 11. AWS_KNOWLEDGE_BASE_ID - testBedrockKnowledgeBaseExperimentalAttributes() & contract tests 12. GEN_AI_MODEL - testBedrockExperimentalAttributes() & contract tests 13. GEN_AI_SYSTEM - contract tests 14. GEN_AI_REQUEST_MAX_TOKENS - testBedrockExperimentalAttributes() & contract tests 15. GEN_AI_REQUEST_TEMPERATURE - testBedrockExperimentalAttributes() & contract tests 16. GEN_AI_REQUEST_TOP_P - contract tests 17. GEN_AI_RESPONSE_FINISH_REASONS - contract tests 18. GEN_AI_USAGE_INPUT_TOKENS - contract tests 19. GEN_AI_USAGE_OUTPUT_TOKENS - contract tests 20. AWS_STATE_MACHINE_ARN - testStepFunctionExperimentalAttributes() & contract tests 21. AWS_STEP_FUNCTIONS_ACTIVITY_ARN - testStepFunctionExperimentalAttributes() & contract tests 22. AWS_SNS_TOPIC_ARN - testSnsExperimentalAttributes() & contract tests 23. AWS_SECRET_ARN - testSecretsManagerExperimentalAttributes() & contract tests 24. AWS_LAMBDA_NAME - testLambdaExperimentalAttributes() 25. AWS_LAMBDA_ARN - testLambdaArnExperimentalAttribute() 26. AWS_LAMBDA_RESOURCE_ID - testLambdaResourceIdExperimentalAttribute() 27. AWS_TABLE_ARN - testTableArnExperimentalAttribute() 28. AWS_AUTH_ACCESS_KEY - testAuthAccessKeyExperimentalAttribute() 29. 
AWS_AUTH_REGION - testAuthRegionExperimentalAttribute() - Tests leverage AWS SDK v2's getValueForField() API for clean, mockable attribute extraction - Includes comprehensive testing for: - Core AWS services (S3, DynamoDB, SQS, SNS, Kinesis, Lambda, Step Functions, Secrets Manager) - Bedrock Gen AI attributes with JSON parsing validation - Bedrock resource attributes (Agent, Knowledge Base, Data Source) - Authentication attributes (access key, region) ### AWS SDK v1.11 (awssdk_v1_11) New attributes being tested: 1. AWS_STREAM_ARN - testKinesisExperimentalAttributes() & contract tests 2. AWS_TABLE_ARN - testTableArnExperimentalAttributes() (Service identification only) 3. AWS_AGENT_ID - contract tests 4. AWS_KNOWLEDGE_BASE_ID - contract tests 5. AWS_DATA_SOURCE_ID - contract tests 6. AWS_GUARDRAIL_ID - testBedrockGuardrailAttributes() (Service identification only) & contract tests 7. AWS_GUARDRAIL_ARN - testBedrockGuardrailAttributes() (Service identification only) & contract tests 8. AWS_BEDROCK_RUNTIME_MODEL_ID - testBedrockRuntimeAttributes() (Service identification only) & contract tests 9. AWS_BEDROCK_SYSTEM - contract tests 10. GEN_AI_REQUEST_MAX_TOKENS - contract tests 11. GEN_AI_REQUEST_TEMPERATURE - contract tests 12. GEN_AI_REQUEST_TOP_P - contract tests 13. GEN_AI_RESPONSE_FINISH_REASONS - contract tests 14. GEN_AI_USAGE_INPUT_TOKENS - contract tests 15. GEN_AI_USAGE_OUTPUT_TOKENS - contract tests 16. AWS_STATE_MACHINE_ARN - testStepFunctionsExperimentalAttributes() & contract tests 17. AWS_STEP_FUNCTIONS_ACTIVITY_ARN - contract tests 18. AWS_SNS_TOPIC_ARN - testSnsExperimentalAttributes() & contract tests 19. AWS_SECRET_ARN - testSecretsManagerExperimentalAttributes() (Service identification only) & contract tests 20. AWS_LAMBDA_NAME - testLambdaNameExperimentalAttributes() 21. AWS_LAMBDA_ARN - testLambdaArnExperimentalAttributes() 22. AWS_LAMBDA_RESOURCE_ID - testLambdaResourceIdExperimentalAttributes() (Service identification only) 23. 
AWS_AUTH_ACCESS_KEY - testAuthAccessKeyAttributes() *V1.11 is harder to test:* V1.11 uses Java reflection to dynamically find and call methods like getFunctionName() on AWS request objects at runtime. This creates several testing challenges: - Mock Method Mismatch: When you mock an AWS request object, it doesn't have the actual methods that reflection is trying to find. The reflection silently fails and returns null, making tests pass even though no attributes were extracted. - Class Dependencies: To test properly, you'd need real AWS SDK classes instead of mocks, creating tight coupling between tests and external dependencies. - Nested Object Complexity: Many attributes require traversing nested properties, which means mocking entire object graphs with proper method chains. Contract tests sidestep these issues by using real AWS SDK objects against LocalStack, testing the complete end-to-end flow including actual reflection behavior without the complexity of mocking Java's reflection system. ### Related - PRs for aws-sdk v1.11: #1115 and #1117 - PRs for aws-sdk v2.2: #1111 and #1113 - Replaces patch: [current patch](https://github.com/aws-observability/aws-otel-java-instrumentation/blob/main/.github/patches/opentelemetry-java-instrumentation.patch) By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .github/actions/patch-dependencies/action.yml | 25 +- .../opentelemetry-java-instrumentation.patch | 4193 ----------------- .github/scripts/patch.sh | 14 - dependencyManagement/build.gradle.kts | 2 +- instrumentation/aws-sdk/README.md | 26 +- instrumentation/aws-sdk/build.gradle.kts | 15 +- ...dkExperimentalAttributesInjectionTest.java | 220 + ...dkExperimentalAttributesInjectionTest.java | 274 ++ lambda-layer/build-layer.sh | 3 - .../aws-otel-java-instrumentation.patch | 8 +- .../opentelemetry-java-instrumentation.patch | 4 +- scripts/local_patch.sh | 24 - 12 files changed, 533 insertions(+), 4275 deletions(-) delete mode 100644 .github/patches/opentelemetry-java-instrumentation.patch create mode 100644 instrumentation/aws-sdk/src/test/java/software/amazon/opentelemetry/javaagent/instrumentation/awssdk_v1_11/AwsSdkExperimentalAttributesInjectionTest.java create mode 100644 instrumentation/aws-sdk/src/test/java/software/amazon/opentelemetry/javaagent/instrumentation/awssdk_v2_2/AwsSdkExperimentalAttributesInjectionTest.java diff --git a/.github/actions/patch-dependencies/action.yml b/.github/actions/patch-dependencies/action.yml index 048c480540..9281534275 100644 --- a/.github/actions/patch-dependencies/action.yml +++ b/.github/actions/patch-dependencies/action.yml @@ -2,7 +2,7 @@ name: "Patch dependencies" description: | Patches direct dependencies of this project leveraging maven local to publish the results. - This workflow supports patching opentelemetry-java and opentelemetry-java-instrumentation repositories by executing + This workflow supports patching opentelemetry-java and opentelemetry-java-contrib repositories by executing the `patch.sh` script that will try to patch those repositories and after that will optionally test and then publish the artifacts to maven local. 
To add a patch you have to add a file in the `.github/patches/` directory with the name of the repository that must @@ -49,9 +49,6 @@ runs: if [[ -f .github/patches/opentelemetry-java.patch ]]; then echo 'patch_otel_java=true' >> $GITHUB_ENV fi - if [[ -f .github/patches/opentelemetry-java-instrumentation.patch ]]; then - echo 'patch_otel_java_instrumentation=true' >> $GITHUB_ENV - fi if [[ -f .github/patches/opentelemetry-java-contrib.patch ]]; then echo 'patch_otel_java_contrib=true' >> $GITHUB_ENV fi @@ -60,7 +57,6 @@ runs: - name: Clone and patch repositories run: .github/scripts/patch.sh if: ${{ env.patch_otel_java == 'true' || - env.patch_otel_java_instrumentation == 'true' || env.patch_otel_java_contrib == 'true' }} shell: bash @@ -101,22 +97,3 @@ runs: run: rm -rf opentelemetry-java-contrib if: ${{ env.patch_otel_java_contrib == 'true' }} shell: bash - - - name: Build opentelemetry-java-instrumentation with tests - uses: gradle/gradle-build-action@v2 - if: ${{ env.patch_otel_java_instrumentation == 'true' && inputs.run_tests != 'false' }} - with: - arguments: check -x spotlessCheck publishToMavenLocal - build-root-directory: opentelemetry-java-instrumentation - - - name: Build opentelemetry java instrumentation - uses: gradle/gradle-build-action@v2 - if: ${{ env.patch_otel_java_instrumentation == 'true' && inputs.run_tests == 'false' }} - with: - arguments: publishToMavenLocal - build-root-directory: opentelemetry-java-instrumentation - - - name: cleanup opentelmetry-java-instrumentation - run: rm -rf opentelemetry-java-instrumentation - if: ${{ env.patch_otel_java_instrumentation == 'true' }} - shell: bash diff --git a/.github/patches/opentelemetry-java-instrumentation.patch b/.github/patches/opentelemetry-java-instrumentation.patch deleted file mode 100644 index 91e5b2730c..0000000000 --- a/.github/patches/opentelemetry-java-instrumentation.patch +++ /dev/null @@ -1,4193 +0,0 @@ -diff --git 
a/docs/apidiffs/current_vs_latest/opentelemetry-instrumentation-annotations.txt b/docs/apidiffs/current_vs_latest/opentelemetry-instrumentation-annotations.txt -index 93437ef1e0..3f564d25bc 100644 ---- a/docs/apidiffs/current_vs_latest/opentelemetry-instrumentation-annotations.txt -+++ b/docs/apidiffs/current_vs_latest/opentelemetry-instrumentation-annotations.txt -@@ -1,2 +1,2 @@ - Comparing source compatibility of opentelemetry-instrumentation-annotations-2.11.0.jar against opentelemetry-instrumentation-annotations-2.10.0.jar --No changes. -\ No newline at end of file -+No changes. -diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-instrumentation-api.txt b/docs/apidiffs/current_vs_latest/opentelemetry-instrumentation-api.txt -index d759eed30a..385bd90663 100644 ---- a/docs/apidiffs/current_vs_latest/opentelemetry-instrumentation-api.txt -+++ b/docs/apidiffs/current_vs_latest/opentelemetry-instrumentation-api.txt -@@ -1,2 +1,2 @@ - Comparing source compatibility of opentelemetry-instrumentation-api-2.11.0.jar against opentelemetry-instrumentation-api-2.10.0.jar --No changes. -\ No newline at end of file -+No changes. -diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-spring-boot-autoconfigure.txt b/docs/apidiffs/current_vs_latest/opentelemetry-spring-boot-autoconfigure.txt -index f657f219ae..2b4a59db8f 100644 ---- a/docs/apidiffs/current_vs_latest/opentelemetry-spring-boot-autoconfigure.txt -+++ b/docs/apidiffs/current_vs_latest/opentelemetry-spring-boot-autoconfigure.txt -@@ -1,2 +1,2 @@ - Comparing source compatibility of opentelemetry-spring-boot-autoconfigure-2.11.0.jar against opentelemetry-spring-boot-autoconfigure-2.10.0.jar --No changes. -\ No newline at end of file -+No changes. 
-diff --git a/docs/apidiffs/current_vs_latest/opentelemetry-spring-boot-starter.txt b/docs/apidiffs/current_vs_latest/opentelemetry-spring-boot-starter.txt -index 02f520fd45..99505334b7 100644 ---- a/docs/apidiffs/current_vs_latest/opentelemetry-spring-boot-starter.txt -+++ b/docs/apidiffs/current_vs_latest/opentelemetry-spring-boot-starter.txt -@@ -1,2 +1,2 @@ - Comparing source compatibility of opentelemetry-spring-boot-starter-2.11.0.jar against opentelemetry-spring-boot-starter-2.10.0.jar --No changes. -\ No newline at end of file -+No changes. -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/javaagent/build.gradle.kts b/instrumentation/aws-sdk/aws-sdk-1.11/javaagent/build.gradle.kts -index f357a19f88..fa90530579 100644 ---- a/instrumentation/aws-sdk/aws-sdk-1.11/javaagent/build.gradle.kts -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/javaagent/build.gradle.kts -@@ -47,6 +47,14 @@ dependencies { - testLibrary("com.amazonaws:aws-java-sdk-kinesis:1.11.106") - testLibrary("com.amazonaws:aws-java-sdk-dynamodb:1.11.106") - testLibrary("com.amazonaws:aws-java-sdk-sns:1.11.106") -+ testLibrary("com.amazonaws:aws-java-sdk-sqs:1.11.106") -+ testLibrary("com.amazonaws:aws-java-sdk-secretsmanager:1.11.309") -+ testLibrary("com.amazonaws:aws-java-sdk-stepfunctions:1.11.230") -+ testLibrary("com.amazonaws:aws-java-sdk-lambda:1.11.678") -+ testLibrary("com.amazonaws:aws-java-sdk-bedrock:1.12.744") -+ testLibrary("com.amazonaws:aws-java-sdk-bedrockagent:1.12.744") -+ testLibrary("com.amazonaws:aws-java-sdk-bedrockagentruntime:1.12.744") -+ testLibrary("com.amazonaws:aws-java-sdk-bedrockruntime:1.12.744") - - testImplementation(project(":instrumentation:aws-sdk:aws-sdk-1.11:testing")) - -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awssdk/v1_11/AwsSpanAssertions.java b/instrumentation/aws-sdk/aws-sdk-1.11/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awssdk/v1_11/AwsSpanAssertions.java 
-index 483a0c5230..5b1ee9ac4a 100644 ---- a/instrumentation/aws-sdk/aws-sdk-1.11/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awssdk/v1_11/AwsSpanAssertions.java -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awssdk/v1_11/AwsSpanAssertions.java -@@ -37,6 +37,7 @@ class AwsSpanAssertions { - satisfies(stringKey("aws.endpoint"), v -> v.isInstanceOf(String.class)), - equalTo(stringKey("aws.queue.name"), queueName), - equalTo(stringKey("aws.queue.url"), queueUrl), -+ equalTo(stringKey("aws.auth.account.access_key"), "test"), - satisfies(AWS_REQUEST_ID, v -> v.isInstanceOf(String.class)), - equalTo(RPC_METHOD, rpcMethod), - equalTo(RPC_SYSTEM, "aws-api"), -@@ -71,6 +72,7 @@ class AwsSpanAssertions { - equalTo(RPC_METHOD, rpcMethod), - equalTo(RPC_SYSTEM, "aws-api"), - equalTo(RPC_SERVICE, "Amazon S3"), -+ equalTo(stringKey("aws.auth.account.access_key"), "test"), - equalTo(HTTP_REQUEST_METHOD, requestMethod), - equalTo(HTTP_RESPONSE_STATUS_CODE, responseStatusCode), - satisfies(URL_FULL, val -> val.startsWith("http://")), -@@ -85,28 +87,52 @@ class AwsSpanAssertions { - } - - static SpanDataAssert sns(SpanDataAssert span, String topicArn, String rpcMethod) { -+ SpanDataAssert spanAssert = -+ span.hasName("SNS." + rpcMethod).hasKind(SpanKind.CLIENT).hasNoParent(); - -- return span.hasName("SNS." 
+ rpcMethod) -- .hasKind(SpanKind.CLIENT) -- .hasNoParent() -- .hasAttributesSatisfyingExactly( -- equalTo(stringKey("aws.agent"), "java-aws-sdk"), -- equalTo(MESSAGING_DESTINATION_NAME, topicArn), -- satisfies(stringKey("aws.endpoint"), v -> v.isInstanceOf(String.class)), -- satisfies(AWS_REQUEST_ID, v -> v.isInstanceOf(String.class)), -- equalTo(RPC_METHOD, rpcMethod), -- equalTo(RPC_SYSTEM, "aws-api"), -- equalTo(RPC_SERVICE, "AmazonSNS"), -- equalTo(HTTP_REQUEST_METHOD, "POST"), -- equalTo(HTTP_RESPONSE_STATUS_CODE, 200), -- satisfies(URL_FULL, val -> val.startsWith("http://")), -- satisfies(SERVER_ADDRESS, v -> v.isInstanceOf(String.class)), -- equalTo(NETWORK_PROTOCOL_VERSION, "1.1"), -- satisfies( -- SERVER_PORT, -- val -> -- val.satisfiesAnyOf( -- v -> assertThat(v).isNull(), -- v -> assertThat(v).isInstanceOf(Number.class)))); -+ // For CreateTopic, the topicArn parameter might be null but aws.sns.topic.arn -+ // will be set from the response -+ if ("CreateTopic".equals(rpcMethod)) { -+ return spanAssert.hasAttributesSatisfyingExactly( -+ equalTo(stringKey("aws.agent"), "java-aws-sdk"), -+ satisfies(stringKey("aws.endpoint"), v -> v.isInstanceOf(String.class)), -+ satisfies(AWS_REQUEST_ID, v -> v.isInstanceOf(String.class)), -+ equalTo(RPC_METHOD, rpcMethod), -+ equalTo(RPC_SYSTEM, "aws-api"), -+ equalTo(RPC_SERVICE, "AmazonSNS"), -+ equalTo(stringKey("aws.auth.account.access_key"), "test"), -+ equalTo(HTTP_REQUEST_METHOD, "POST"), -+ equalTo(HTTP_RESPONSE_STATUS_CODE, 200), -+ satisfies(URL_FULL, val -> val.startsWith("http://")), -+ satisfies(SERVER_ADDRESS, v -> v.isInstanceOf(String.class)), -+ equalTo(NETWORK_PROTOCOL_VERSION, "1.1"), -+ satisfies( -+ SERVER_PORT, -+ val -> -+ val.satisfiesAnyOf( -+ v -> assertThat(v).isNull(), v -> assertThat(v).isInstanceOf(Number.class))), -+ satisfies(stringKey("aws.sns.topic.arn"), v -> v.isInstanceOf(String.class))); -+ } -+ -+ return spanAssert.hasAttributesSatisfyingExactly( -+ equalTo(stringKey("aws.agent"), 
"java-aws-sdk"), -+ equalTo(MESSAGING_DESTINATION_NAME, topicArn), -+ satisfies(stringKey("aws.endpoint"), v -> v.isInstanceOf(String.class)), -+ satisfies(AWS_REQUEST_ID, v -> v.isInstanceOf(String.class)), -+ equalTo(RPC_METHOD, rpcMethod), -+ equalTo(RPC_SYSTEM, "aws-api"), -+ equalTo(RPC_SERVICE, "AmazonSNS"), -+ equalTo(stringKey("aws.auth.account.access_key"), "test"), -+ equalTo(HTTP_REQUEST_METHOD, "POST"), -+ equalTo(HTTP_RESPONSE_STATUS_CODE, 200), -+ satisfies(URL_FULL, val -> val.startsWith("http://")), -+ satisfies(SERVER_ADDRESS, v -> v.isInstanceOf(String.class)), -+ equalTo(NETWORK_PROTOCOL_VERSION, "1.1"), -+ satisfies( -+ SERVER_PORT, -+ val -> -+ val.satisfiesAnyOf( -+ v -> assertThat(v).isNull(), v -> assertThat(v).isInstanceOf(Number.class))), -+ equalTo(stringKey("aws.sns.topic.arn"), topicArn)); - } - } -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awssdk/v1_11/S3TracingTest.java b/instrumentation/aws-sdk/aws-sdk-1.11/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awssdk/v1_11/S3TracingTest.java -index 56eca09f8c..82c3379840 100644 ---- a/instrumentation/aws-sdk/aws-sdk-1.11/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awssdk/v1_11/S3TracingTest.java -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awssdk/v1_11/S3TracingTest.java -@@ -105,6 +105,7 @@ class S3TracingTest { - equalTo(RPC_METHOD, "ReceiveMessage"), - equalTo(RPC_SYSTEM, "aws-api"), - equalTo(RPC_SERVICE, "AmazonSQS"), -+ equalTo(stringKey("aws.auth.account.access_key"), "test"), - equalTo(HTTP_REQUEST_METHOD, "POST"), - equalTo(HTTP_RESPONSE_STATUS_CODE, 200), - satisfies(URL_FULL, val -> val.startsWith("http://")), -@@ -198,6 +199,7 @@ class S3TracingTest { - equalTo(RPC_METHOD, "ReceiveMessage"), - equalTo(RPC_SYSTEM, "aws-api"), - equalTo(RPC_SERVICE, "AmazonSQS"), -+ 
equalTo(stringKey("aws.auth.account.access_key"), "test"), - equalTo(HTTP_REQUEST_METHOD, "POST"), - equalTo(HTTP_RESPONSE_STATUS_CODE, 200), - satisfies(URL_FULL, val -> val.startsWith("http://")), -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awssdk/v1_11/SnsTracingTest.java b/instrumentation/aws-sdk/aws-sdk-1.11/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awssdk/v1_11/SnsTracingTest.java -index 429ca07938..d21918bc70 100644 ---- a/instrumentation/aws-sdk/aws-sdk-1.11/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awssdk/v1_11/SnsTracingTest.java -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awssdk/v1_11/SnsTracingTest.java -@@ -89,6 +89,7 @@ class SnsTracingTest { - equalTo(RPC_METHOD, "ReceiveMessage"), - equalTo(RPC_SYSTEM, "aws-api"), - equalTo(RPC_SERVICE, "AmazonSQS"), -+ equalTo(stringKey("aws.auth.account.access_key"), "test"), - equalTo(HTTP_REQUEST_METHOD, "POST"), - equalTo(HTTP_RESPONSE_STATUS_CODE, 200), - satisfies(URL_FULL, val -> val.startsWith("http://")), -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/library-autoconfigure/build.gradle.kts b/instrumentation/aws-sdk/aws-sdk-1.11/library-autoconfigure/build.gradle.kts -index 6cf49a21c4..3705634153 100644 ---- a/instrumentation/aws-sdk/aws-sdk-1.11/library-autoconfigure/build.gradle.kts -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/library-autoconfigure/build.gradle.kts -@@ -18,6 +18,13 @@ dependencies { - testLibrary("com.amazonaws:aws-java-sdk-dynamodb:1.11.106") - testLibrary("com.amazonaws:aws-java-sdk-sns:1.11.106") - testLibrary("com.amazonaws:aws-java-sdk-sqs:1.11.106") -+ testLibrary("com.amazonaws:aws-java-sdk-secretsmanager:1.11.309") -+ testLibrary("com.amazonaws:aws-java-sdk-stepfunctions:1.11.230") -+ testLibrary("com.amazonaws:aws-java-sdk-lambda:1.11.678") -+ 
testLibrary("com.amazonaws:aws-java-sdk-bedrock:1.12.744") -+ testLibrary("com.amazonaws:aws-java-sdk-bedrockagent:1.12.744") -+ testLibrary("com.amazonaws:aws-java-sdk-bedrockagentruntime:1.12.744") -+ testLibrary("com.amazonaws:aws-java-sdk-bedrockruntime:1.12.744") - - // last version that does not use json protocol - latestDepTestLibrary("com.amazonaws:aws-java-sdk-sqs:1.12.583") -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/library/build.gradle.kts b/instrumentation/aws-sdk/aws-sdk-1.11/library/build.gradle.kts -index bfe844e413..dec4935b55 100644 ---- a/instrumentation/aws-sdk/aws-sdk-1.11/library/build.gradle.kts -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/library/build.gradle.kts -@@ -17,6 +17,14 @@ dependencies { - testLibrary("com.amazonaws:aws-java-sdk-kinesis:1.11.106") - testLibrary("com.amazonaws:aws-java-sdk-dynamodb:1.11.106") - testLibrary("com.amazonaws:aws-java-sdk-sns:1.11.106") -+ testLibrary("com.amazonaws:aws-java-sdk-sqs:1.11.106") -+ testLibrary("com.amazonaws:aws-java-sdk-secretsmanager:1.11.309") -+ testLibrary("com.amazonaws:aws-java-sdk-stepfunctions:1.11.230") -+ testLibrary("com.amazonaws:aws-java-sdk-lambda:1.11.678") -+ testLibrary("com.amazonaws:aws-java-sdk-bedrock:1.12.744") -+ testLibrary("com.amazonaws:aws-java-sdk-bedrockagent:1.12.744") -+ testLibrary("com.amazonaws:aws-java-sdk-bedrockagentruntime:1.12.744") -+ testLibrary("com.amazonaws:aws-java-sdk-bedrockruntime:1.12.744") - - // last version that does not use json protocol - latestDepTestLibrary("com.amazonaws:aws-java-sdk-sqs:1.12.583") -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AwsBedrockResourceType.java b/instrumentation/aws-sdk/aws-sdk-1.11/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AwsBedrockResourceType.java -new file mode 100644 -index 0000000000..e890cb3c0f ---- /dev/null -+++ 
b/instrumentation/aws-sdk/aws-sdk-1.11/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AwsBedrockResourceType.java -@@ -0,0 +1,133 @@ -+/* -+ * Copyright The OpenTelemetry Authors -+ * SPDX-License-Identifier: Apache-2.0 -+ */ -+ -+package io.opentelemetry.instrumentation.awssdk.v1_11; -+ -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_AGENT_ID; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_DATA_SOURCE_ID; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_KNOWLEDGE_BASE_ID; -+ -+import io.opentelemetry.api.common.AttributeKey; -+import java.util.Arrays; -+import java.util.HashMap; -+import java.util.List; -+import java.util.Map; -+import java.util.function.Function; -+ -+enum AwsBedrockResourceType { -+ AGENT_TYPE(AWS_AGENT_ID, RequestAccess::getAgentId), -+ DATA_SOURCE_TYPE(AWS_DATA_SOURCE_ID, RequestAccess::getDataSourceId), -+ KNOWLEDGE_BASE_TYPE(AWS_KNOWLEDGE_BASE_ID, RequestAccess::getKnowledgeBaseId); -+ -+ @SuppressWarnings("ImmutableEnumChecker") -+ private final AttributeKey keyAttribute; -+ -+ @SuppressWarnings("ImmutableEnumChecker") -+ private final Function attributeValueAccessor; -+ -+ AwsBedrockResourceType( -+ AttributeKey keyAttribute, Function attributeValueAccessor) { -+ this.keyAttribute = keyAttribute; -+ this.attributeValueAccessor = attributeValueAccessor; -+ } -+ -+ public AttributeKey getKeyAttribute() { -+ return keyAttribute; -+ } -+ -+ public Function getAttributeValueAccessor() { -+ return attributeValueAccessor; -+ } -+ -+ public static AwsBedrockResourceType getRequestType(String requestClass) { -+ return AwsBedrockResourceTypeMap.BEDROCK_REQUEST_MAP.get(requestClass); -+ } -+ -+ public static AwsBedrockResourceType getResponseType(String responseClass) { -+ return AwsBedrockResourceTypeMap.BEDROCK_RESPONSE_MAP.get(responseClass); -+ } -+ -+ private static class 
AwsBedrockResourceTypeMap { -+ private static final Map BEDROCK_REQUEST_MAP = new HashMap<>(); -+ private static final Map BEDROCK_RESPONSE_MAP = new HashMap<>(); -+ -+ // Bedrock request/response mapping -+ // We only support operations that are related to the resource and where the context contains -+ // the AgentID/DataSourceID/KnowledgeBaseID. -+ // AgentID -+ private static final List agentRequestClasses = -+ Arrays.asList( -+ "CreateAgentActionGroupRequest", -+ "CreateAgentAliasRequest", -+ "DeleteAgentActionGroupRequest", -+ "DeleteAgentAliasRequest", -+ "DeleteAgentRequest", -+ "DeleteAgentVersionRequest", -+ "GetAgentActionGroupRequest", -+ "GetAgentAliasRequest", -+ "GetAgentRequest", -+ "GetAgentVersionRequest", -+ "ListAgentActionGroupsRequest", -+ "ListAgentAliasesRequest", -+ "ListAgentKnowledgeBasesRequest", -+ "ListAgentVersionsRequest", -+ "PrepareAgentRequest", -+ "UpdateAgentActionGroupRequest", -+ "UpdateAgentAliasRequest", -+ "UpdateAgentRequest"); -+ private static final List agentResponseClasses = -+ Arrays.asList( -+ "DeleteAgentAliasResult", -+ "DeleteAgentResult", -+ "DeleteAgentVersionResult", -+ "PrepareAgentResult"); -+ // DataSourceID -+ private static final List dataSourceRequestClasses = -+ Arrays.asList("DeleteDataSourceRequest", "GetDataSourceRequest", "UpdateDataSourceRequest"); -+ private static final List dataSourceResponseClasses = -+ Arrays.asList("DeleteDataSourceResult"); -+ // KnowledgeBaseID -+ private static final List knowledgeBaseRequestClasses = -+ Arrays.asList( -+ "AssociateAgentKnowledgeBaseRequest", -+ "CreateDataSourceRequest", -+ "DeleteKnowledgeBaseRequest", -+ "DisassociateAgentKnowledgeBaseRequest", -+ "GetAgentKnowledgeBaseRequest", -+ "GetKnowledgeBaseRequest", -+ "ListDataSourcesRequest", -+ "UpdateAgentKnowledgeBaseRequest"); -+ private static final List knowledgeBaseResponseClasses = -+ Arrays.asList("DeleteKnowledgeBaseResult"); -+ -+ private AwsBedrockResourceTypeMap() {} -+ -+ static { -+ // Populate 
the BEDROCK_REQUEST_MAP -+ for (String agentRequestClass : agentRequestClasses) { -+ BEDROCK_REQUEST_MAP.put(agentRequestClass, AwsBedrockResourceType.AGENT_TYPE); -+ } -+ for (String dataSourceRequestClass : dataSourceRequestClasses) { -+ BEDROCK_REQUEST_MAP.put(dataSourceRequestClass, AwsBedrockResourceType.DATA_SOURCE_TYPE); -+ } -+ for (String knowledgeBaseRequestClass : knowledgeBaseRequestClasses) { -+ BEDROCK_REQUEST_MAP.put( -+ knowledgeBaseRequestClass, AwsBedrockResourceType.KNOWLEDGE_BASE_TYPE); -+ } -+ -+ // Populate the BEDROCK_RESPONSE_MAP -+ for (String agentResponseClass : agentResponseClasses) { -+ BEDROCK_RESPONSE_MAP.put(agentResponseClass, AwsBedrockResourceType.AGENT_TYPE); -+ } -+ for (String dataSourceResponseClass : dataSourceResponseClasses) { -+ BEDROCK_RESPONSE_MAP.put(dataSourceResponseClass, AwsBedrockResourceType.DATA_SOURCE_TYPE); -+ } -+ for (String knowledgeBaseResponseClass : knowledgeBaseResponseClasses) { -+ BEDROCK_RESPONSE_MAP.put( -+ knowledgeBaseResponseClass, AwsBedrockResourceType.KNOWLEDGE_BASE_TYPE); -+ } -+ } -+ } -+} -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AwsExperimentalAttributes.java b/instrumentation/aws-sdk/aws-sdk-1.11/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AwsExperimentalAttributes.java -index 096c7826a1..27613c04f2 100644 ---- a/instrumentation/aws-sdk/aws-sdk-1.11/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AwsExperimentalAttributes.java -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AwsExperimentalAttributes.java -@@ -16,7 +16,41 @@ final class AwsExperimentalAttributes { - static final AttributeKey AWS_QUEUE_URL = stringKey("aws.queue.url"); - static final AttributeKey AWS_QUEUE_NAME = stringKey("aws.queue.name"); - static final AttributeKey AWS_STREAM_NAME = stringKey("aws.stream.name"); -+ static final AttributeKey
AWS_STREAM_ARN = stringKey("aws.stream.arn"); - static final AttributeKey AWS_TABLE_NAME = stringKey("aws.table.name"); -+ static final AttributeKey AWS_TABLE_ARN = stringKey("aws.table.arn"); -+ static final AttributeKey AWS_AGENT_ID = stringKey("aws.bedrock.agent.id"); -+ static final AttributeKey AWS_KNOWLEDGE_BASE_ID = -+ stringKey("aws.bedrock.knowledge_base.id"); -+ static final AttributeKey AWS_DATA_SOURCE_ID = stringKey("aws.bedrock.data_source.id"); -+ static final AttributeKey AWS_GUARDRAIL_ID = stringKey("aws.bedrock.guardrail.id"); -+ static final AttributeKey AWS_GUARDRAIL_ARN = stringKey("aws.bedrock.guardrail.arn"); -+ // TODO: Merge in gen_ai attributes in opentelemetry-semconv-incubating once upgrade to v1.26.0 -+ static final AttributeKey AWS_BEDROCK_RUNTIME_MODEL_ID = -+ stringKey("gen_ai.request.model"); -+ static final AttributeKey AWS_BEDROCK_SYSTEM = stringKey("gen_ai.system"); -+ static final AttributeKey GEN_AI_REQUEST_MAX_TOKENS = -+ stringKey("gen_ai.request.max_tokens"); -+ static final AttributeKey GEN_AI_REQUEST_TEMPERATURE = -+ stringKey("gen_ai.request.temperature"); -+ static final AttributeKey GEN_AI_REQUEST_TOP_P = stringKey("gen_ai.request.top_p"); -+ static final AttributeKey GEN_AI_RESPONSE_FINISH_REASONS = -+ stringKey("gen_ai.response.finish_reasons"); -+ static final AttributeKey GEN_AI_USAGE_INPUT_TOKENS = -+ stringKey("gen_ai.usage.input_tokens"); -+ static final AttributeKey GEN_AI_USAGE_OUTPUT_TOKENS = -+ stringKey("gen_ai.usage.output_tokens"); -+ static final AttributeKey AWS_STATE_MACHINE_ARN = -+ stringKey("aws.stepfunctions.state_machine.arn"); -+ static final AttributeKey AWS_STEP_FUNCTIONS_ACTIVITY_ARN = -+ stringKey("aws.stepfunctions.activity.arn"); -+ static final AttributeKey AWS_SNS_TOPIC_ARN = stringKey("aws.sns.topic.arn"); -+ static final AttributeKey AWS_SECRET_ARN = stringKey("aws.secretsmanager.secret.arn"); -+ static final AttributeKey AWS_LAMBDA_NAME = stringKey("aws.lambda.function.name"); -+ static 
final AttributeKey AWS_LAMBDA_ARN = stringKey("aws.lambda.function.arn"); -+ static final AttributeKey AWS_LAMBDA_RESOURCE_ID = -+ stringKey("aws.lambda.resource_mapping.id"); -+ static final AttributeKey AWS_AUTH_ACCESS_KEY = stringKey("aws.auth.account.access_key"); - - private AwsExperimentalAttributes() {} - } -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AwsSdkExperimentalAttributesExtractor.java b/instrumentation/aws-sdk/aws-sdk-1.11/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AwsSdkExperimentalAttributesExtractor.java -index 541e67d23b..5a321f9cb1 100644 ---- a/instrumentation/aws-sdk/aws-sdk-1.11/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AwsSdkExperimentalAttributesExtractor.java -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AwsSdkExperimentalAttributesExtractor.java -@@ -6,25 +6,56 @@ - package io.opentelemetry.instrumentation.awssdk.v1_11; - - import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_AGENT; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_AGENT_ID; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_AUTH_ACCESS_KEY; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_BEDROCK_RUNTIME_MODEL_ID; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_BEDROCK_SYSTEM; - import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_BUCKET_NAME; - import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_ENDPOINT; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_GUARDRAIL_ARN; -+import static 
io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_GUARDRAIL_ID; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_KNOWLEDGE_BASE_ID; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_LAMBDA_ARN; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_LAMBDA_NAME; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_LAMBDA_RESOURCE_ID; - import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_QUEUE_NAME; - import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_QUEUE_URL; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_SECRET_ARN; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_SNS_TOPIC_ARN; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_STATE_MACHINE_ARN; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_STEP_FUNCTIONS_ACTIVITY_ARN; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_STREAM_ARN; - import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_STREAM_NAME; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_TABLE_ARN; - import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.AWS_TABLE_NAME; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.GEN_AI_REQUEST_MAX_TOKENS; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.GEN_AI_REQUEST_TEMPERATURE; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.GEN_AI_REQUEST_TOP_P; -+import static 
io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.GEN_AI_RESPONSE_FINISH_REASONS; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.GEN_AI_USAGE_INPUT_TOKENS; -+import static io.opentelemetry.instrumentation.awssdk.v1_11.AwsExperimentalAttributes.GEN_AI_USAGE_OUTPUT_TOKENS; - - import com.amazonaws.Request; - import com.amazonaws.Response; -+import com.amazonaws.auth.AWSCredentials; -+import com.amazonaws.handlers.HandlerContextKey; - import io.opentelemetry.api.common.AttributeKey; - import io.opentelemetry.api.common.AttributesBuilder; - import io.opentelemetry.context.Context; - import io.opentelemetry.instrumentation.api.instrumenter.AttributesExtractor; -+import java.util.Objects; - import java.util.function.Function; - import javax.annotation.Nullable; - - class AwsSdkExperimentalAttributesExtractor - implements AttributesExtractor, Response> { - private static final String COMPONENT_NAME = "java-aws-sdk"; -+ private static final String BEDROCK_SERVICE = "AmazonBedrock"; -+ private static final String BEDROCK_AGENT_SERVICE = "AWSBedrockAgent"; -+ private static final String BEDROCK_AGENT_RUNTIME_SERVICE = "AWSBedrockAgentRuntime"; -+ private static final String BEDROCK_RUNTIME_SERVICE = "AmazonBedrockRuntime"; -+ private static final HandlerContextKey AWS_CREDENTIALS = -+ new HandlerContextKey("AWSCredentials"); - - @Override - public void onStart(AttributesBuilder attributes, Context parentContext, Request request) { -@@ -32,14 +63,165 @@ class AwsSdkExperimentalAttributesExtractor - attributes.put(AWS_ENDPOINT, request.getEndpoint().toString()); - - Object originalRequest = request.getOriginalRequest(); -- setRequestAttribute(attributes, AWS_BUCKET_NAME, originalRequest, RequestAccess::getBucketName); -- setRequestAttribute(attributes, AWS_QUEUE_URL, originalRequest, RequestAccess::getQueueUrl); -- setRequestAttribute(attributes, AWS_QUEUE_NAME, originalRequest, RequestAccess::getQueueName); -- 
setRequestAttribute(attributes, AWS_STREAM_NAME, originalRequest, RequestAccess::getStreamName); -- setRequestAttribute(attributes, AWS_TABLE_NAME, originalRequest, RequestAccess::getTableName); -+ String requestClassName = originalRequest.getClass().getSimpleName(); -+ AWSCredentials credentials = request.getHandlerContext(AWS_CREDENTIALS); -+ if (credentials != null) { -+ String accessKeyId = credentials.getAWSAccessKeyId(); -+ if (accessKeyId != null) { -+ attributes.put(AWS_AUTH_ACCESS_KEY, accessKeyId); -+ } -+ } -+ setAttribute(attributes, AWS_BUCKET_NAME, originalRequest, RequestAccess::getBucketName); -+ setAttribute(attributes, AWS_QUEUE_URL, originalRequest, RequestAccess::getQueueUrl); -+ setAttribute(attributes, AWS_QUEUE_NAME, originalRequest, RequestAccess::getQueueName); -+ setAttribute(attributes, AWS_STREAM_NAME, originalRequest, RequestAccess::getStreamName); -+ setAttribute(attributes, AWS_STREAM_ARN, originalRequest, RequestAccess::getStreamArn); -+ setAttribute(attributes, AWS_TABLE_NAME, originalRequest, RequestAccess::getTableName); -+ setAttribute( -+ attributes, AWS_STATE_MACHINE_ARN, originalRequest, RequestAccess::getStateMachineArn); -+ setAttribute( -+ attributes, -+ AWS_STEP_FUNCTIONS_ACTIVITY_ARN, -+ originalRequest, -+ RequestAccess::getStepFunctionsActivityArn); -+ setAttribute(attributes, AWS_SNS_TOPIC_ARN, originalRequest, RequestAccess::getSnsTopicArn); -+ setAttribute(attributes, AWS_SECRET_ARN, originalRequest, RequestAccess::getSecretArn); -+ setAttribute(attributes, AWS_LAMBDA_NAME, originalRequest, RequestAccess::getLambdaName); -+ setAttribute( -+ attributes, AWS_LAMBDA_RESOURCE_ID, originalRequest, RequestAccess::getLambdaResourceId); -+ // Get serviceName defined in the AWS Java SDK V1 Request class. -+ String serviceName = request.getServiceName(); -+ // Extract request attributes only for Bedrock services. 
-+ if (isBedrockService(serviceName)) { -+ bedrockOnStart(attributes, originalRequest, requestClassName, serviceName); -+ } - } - -- private static void setRequestAttribute( -+ @Override -+ public void onEnd( -+ AttributesBuilder attributes, -+ Context context, -+ Request request, -+ @Nullable Response response, -+ @Nullable Throwable error) { -+ if (response != null) { -+ Object awsResp = response.getAwsResponse(); -+ setAttribute(attributes, AWS_TABLE_ARN, awsResp, RequestAccess::getTableArn); -+ setAttribute(attributes, AWS_LAMBDA_ARN, awsResp, RequestAccess::getLambdaArn); -+ setAttribute(attributes, AWS_STATE_MACHINE_ARN, awsResp, RequestAccess::getStateMachineArn); -+ setAttribute( -+ attributes, -+ AWS_STEP_FUNCTIONS_ACTIVITY_ARN, -+ awsResp, -+ RequestAccess::getStepFunctionsActivityArn); -+ setAttribute(attributes, AWS_SNS_TOPIC_ARN, awsResp, RequestAccess::getSnsTopicArn); -+ setAttribute(attributes, AWS_SECRET_ARN, awsResp, RequestAccess::getSecretArn); -+ // Get serviceName defined in the AWS Java SDK V1 Request class. 
-+ String serviceName = request.getServiceName(); -+ // Extract response attributes for Bedrock services -+ if (awsResp != null && isBedrockService(serviceName)) { -+ bedrockOnEnd(attributes, awsResp, serviceName); -+ } -+ } -+ } -+ -+ private static void bedrockOnStart( -+ AttributesBuilder attributes, -+ Object originalRequest, -+ String requestClassName, -+ String serviceName) { -+ switch (serviceName) { -+ case BEDROCK_SERVICE: -+ setAttribute(attributes, AWS_GUARDRAIL_ID, originalRequest, RequestAccess::getGuardrailId); -+ break; -+ case BEDROCK_AGENT_SERVICE: -+ AwsBedrockResourceType resourceType = -+ AwsBedrockResourceType.getRequestType(requestClassName); -+ if (resourceType != null) { -+ setAttribute( -+ attributes, -+ resourceType.getKeyAttribute(), -+ originalRequest, -+ resourceType.getAttributeValueAccessor()); -+ } -+ break; -+ case BEDROCK_AGENT_RUNTIME_SERVICE: -+ setAttribute(attributes, AWS_AGENT_ID, originalRequest, RequestAccess::getAgentId); -+ setAttribute( -+ attributes, AWS_KNOWLEDGE_BASE_ID, originalRequest, RequestAccess::getKnowledgeBaseId); -+ break; -+ case BEDROCK_RUNTIME_SERVICE: -+ if (!Objects.equals(requestClassName, "InvokeModelRequest")) { -+ break; -+ } -+ attributes.put(AWS_BEDROCK_SYSTEM, "aws.bedrock"); -+ Function getter = RequestAccess::getModelId; -+ String modelId = getter.apply(originalRequest); -+ attributes.put(AWS_BEDROCK_RUNTIME_MODEL_ID, modelId); -+ -+ setAttribute( -+ attributes, GEN_AI_REQUEST_MAX_TOKENS, originalRequest, RequestAccess::getMaxTokens); -+ setAttribute( -+ attributes, GEN_AI_REQUEST_TEMPERATURE, originalRequest, RequestAccess::getTemperature); -+ setAttribute(attributes, GEN_AI_REQUEST_TOP_P, originalRequest, RequestAccess::getTopP); -+ setAttribute( -+ attributes, GEN_AI_USAGE_INPUT_TOKENS, originalRequest, RequestAccess::getInputTokens); -+ break; -+ default: -+ break; -+ } -+ } -+ -+ private static void bedrockOnEnd( -+ AttributesBuilder attributes, Object awsResp, String serviceName) { -+ 
switch (serviceName) { -+ case BEDROCK_SERVICE: -+ setAttribute(attributes, AWS_GUARDRAIL_ID, awsResp, RequestAccess::getGuardrailId); -+ setAttribute(attributes, AWS_GUARDRAIL_ARN, awsResp, RequestAccess::getGuardrailArn); -+ break; -+ case BEDROCK_AGENT_SERVICE: -+ String responseClassName = awsResp.getClass().getSimpleName(); -+ AwsBedrockResourceType resourceType = -+ AwsBedrockResourceType.getResponseType(responseClassName); -+ if (resourceType != null) { -+ setAttribute( -+ attributes, -+ resourceType.getKeyAttribute(), -+ awsResp, -+ resourceType.getAttributeValueAccessor()); -+ } -+ break; -+ case BEDROCK_AGENT_RUNTIME_SERVICE: -+ setAttribute(attributes, AWS_AGENT_ID, awsResp, RequestAccess::getAgentId); -+ setAttribute(attributes, AWS_KNOWLEDGE_BASE_ID, awsResp, RequestAccess::getKnowledgeBaseId); -+ break; -+ case BEDROCK_RUNTIME_SERVICE: -+ if (!Objects.equals(awsResp.getClass().getSimpleName(), "InvokeModelResult")) { -+ break; -+ } -+ -+ setAttribute(attributes, GEN_AI_USAGE_INPUT_TOKENS, awsResp, RequestAccess::getInputTokens); -+ setAttribute( -+ attributes, GEN_AI_USAGE_OUTPUT_TOKENS, awsResp, RequestAccess::getOutputTokens); -+ setAttribute( -+ attributes, GEN_AI_RESPONSE_FINISH_REASONS, awsResp, RequestAccess::getFinishReasons); -+ break; -+ default: -+ break; -+ } -+ } -+ -+ private static boolean isBedrockService(String serviceName) { -+ // Check if the serviceName belongs to Bedrock Services defined in AWS Java SDK V1. 
-+ // For example AmazonBedrock -+ return serviceName.equals(BEDROCK_SERVICE) -+ || serviceName.equals(BEDROCK_AGENT_SERVICE) -+ || serviceName.equals(BEDROCK_AGENT_RUNTIME_SERVICE) -+ || serviceName.equals(BEDROCK_RUNTIME_SERVICE); -+ } -+ -+ private static void setAttribute( - AttributesBuilder attributes, - AttributeKey key, - Object request, -@@ -49,12 +231,4 @@ class AwsSdkExperimentalAttributesExtractor - attributes.put(key, value); - } - } -- -- @Override -- public void onEnd( -- AttributesBuilder attributes, -- Context context, -- Request request, -- @Nullable Response response, -- @Nullable Throwable error) {} - } -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/BedrockJsonParser.java b/instrumentation/aws-sdk/aws-sdk-1.11/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/BedrockJsonParser.java -new file mode 100644 -index 0000000000..d1acc5768a ---- /dev/null -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/BedrockJsonParser.java -@@ -0,0 +1,267 @@ -+/* -+ * Copyright The OpenTelemetry Authors -+ * SPDX-License-Identifier: Apache-2.0 -+ */ -+ -+package io.opentelemetry.instrumentation.awssdk.v1_11; -+ -+import java.util.ArrayList; -+import java.util.HashMap; -+import java.util.List; -+import java.util.Map; -+ -+public class BedrockJsonParser { -+ -+ // Prevent instantiation -+ private BedrockJsonParser() { -+ throw new UnsupportedOperationException("Utility class"); -+ } -+ -+ public static LlmJson parse(String jsonString) { -+ JsonParser parser = new JsonParser(jsonString); -+ Map jsonBody = parser.parse(); -+ return new LlmJson(jsonBody); -+ } -+ -+ static class JsonParser { -+ private final String json; -+ private int position; -+ -+ public JsonParser(String json) { -+ this.json = json.trim(); -+ this.position = 0; -+ } -+ -+ private void skipWhitespace() { -+ while (position < json.length() && 
Character.isWhitespace(json.charAt(position))) { -+ position++; -+ } -+ } -+ -+ private char currentChar() { -+ return json.charAt(position); -+ } -+ -+ private static boolean isHexDigit(char c) { -+ return (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F'); -+ } -+ -+ private void expect(char c) { -+ skipWhitespace(); -+ if (currentChar() != c) { -+ throw new IllegalArgumentException( -+ "Expected '" + c + "' but found '" + currentChar() + "'"); -+ } -+ position++; -+ } -+ -+ private String readString() { -+ skipWhitespace(); -+ expect('"'); // Ensure the string starts with a quote -+ StringBuilder result = new StringBuilder(); -+ while (currentChar() != '"') { -+ // Handle escape sequences -+ if (currentChar() == '\\') { -+ position++; // Move past the backslash -+ if (position >= json.length()) { -+ throw new IllegalArgumentException("Unexpected end of input in string escape sequence"); -+ } -+ char escapeChar = currentChar(); -+ switch (escapeChar) { -+ case '"': -+ case '\\': -+ case '/': -+ result.append(escapeChar); -+ break; -+ case 'b': -+ result.append('\b'); -+ break; -+ case 'f': -+ result.append('\f'); -+ break; -+ case 'n': -+ result.append('\n'); -+ break; -+ case 'r': -+ result.append('\r'); -+ break; -+ case 't': -+ result.append('\t'); -+ break; -+ case 'u': // Unicode escape sequence -+ if (position + 4 >= json.length()) { -+ throw new IllegalArgumentException("Invalid unicode escape sequence in string"); -+ } -+ char[] hexChars = new char[4]; -+ for (int i = 0; i < 4; i++) { -+ position++; // Move to the next character -+ char hexChar = json.charAt(position); -+ if (!isHexDigit(hexChar)) { -+ throw new IllegalArgumentException( -+ "Invalid hexadecimal digit in unicode escape sequence"); -+ } -+ hexChars[i] = hexChar; -+ } -+ int unicodeValue = Integer.parseInt(new String(hexChars), 16); -+ result.append((char) unicodeValue); -+ break; -+ default: -+ throw new IllegalArgumentException("Invalid escape character: \\" + 
escapeChar); -+ } -+ position++; -+ } else { -+ result.append(currentChar()); -+ position++; -+ } -+ } -+ position++; // Skip closing quote -+ return result.toString(); -+ } -+ -+ private Object readValue() { -+ skipWhitespace(); -+ char c = currentChar(); -+ -+ if (c == '"') { -+ return readString(); -+ } else if (Character.isDigit(c)) { -+ return readScopedNumber(); -+ } else if (c == '{') { -+ return readObject(); // JSON Objects -+ } else if (c == '[') { -+ return readArray(); // JSON Arrays -+ } else if (json.startsWith("true", position)) { -+ position += 4; -+ return true; -+ } else if (json.startsWith("false", position)) { -+ position += 5; -+ return false; -+ } else if (json.startsWith("null", position)) { -+ position += 4; -+ return null; // JSON null -+ } else { -+ throw new IllegalArgumentException("Unexpected character: " + c); -+ } -+ } -+ -+ private Number readScopedNumber() { -+ int start = position; -+ -+ // Consume digits and the optional decimal point -+ while (position < json.length() -+ && (Character.isDigit(json.charAt(position)) || json.charAt(position) == '.')) { -+ position++; -+ } -+ -+ String number = json.substring(start, position); -+ -+ if (number.contains(".")) { -+ double value = Double.parseDouble(number); -+ if (value < 0.0 || value > 1.0) { -+ throw new IllegalArgumentException( -+ "Value out of bounds for Bedrock Floating Point Attribute: " + number); -+ } -+ return value; -+ } else { -+ return Integer.parseInt(number); -+ } -+ } -+ -+ private Map readObject() { -+ Map map = new HashMap<>(); -+ expect('{'); -+ skipWhitespace(); -+ while (currentChar() != '}') { -+ String key = readString(); -+ expect(':'); -+ Object value = readValue(); -+ map.put(key, value); -+ skipWhitespace(); -+ if (currentChar() == ',') { -+ position++; -+ } -+ } -+ position++; // Skip closing brace -+ return map; -+ } -+ -+ private List readArray() { -+ List list = new ArrayList<>(); -+ expect('['); -+ skipWhitespace(); -+ while (currentChar() != ']') { -+ 
list.add(readValue()); -+ skipWhitespace(); -+ if (currentChar() == ',') { -+ position++; -+ } -+ } -+ position++; -+ return list; -+ } -+ -+ public Map parse() { -+ return readObject(); -+ } -+ } -+ -+ // Resolves paths in a JSON structure -+ static class JsonPathResolver { -+ -+ // Private constructor to prevent instantiation -+ private JsonPathResolver() { -+ throw new UnsupportedOperationException("Utility class"); -+ } -+ -+ public static Object resolvePath(LlmJson llmJson, String... paths) { -+ for (String path : paths) { -+ Object value = resolvePath(llmJson.getJsonBody(), path); -+ if (value != null) { -+ return value; -+ } -+ } -+ return null; -+ } -+ -+ private static Object resolvePath(Map json, String path) { -+ String[] keys = path.split("/"); -+ Object current = json; -+ -+ for (String key : keys) { -+ if (key.isEmpty()) { -+ continue; -+ } -+ -+ if (current instanceof Map) { -+ current = ((Map) current).get(key); -+ } else if (current instanceof List) { -+ try { -+ int index = Integer.parseInt(key); -+ current = ((List) current).get(index); -+ } catch (NumberFormatException | IndexOutOfBoundsException e) { -+ return null; -+ } -+ } else { -+ return null; -+ } -+ -+ if (current == null) { -+ return null; -+ } -+ } -+ return current; -+ } -+ } -+ -+ public static class LlmJson { -+ private final Map jsonBody; -+ -+ public LlmJson(Map jsonBody) { -+ this.jsonBody = jsonBody; -+ } -+ -+ public Map getJsonBody() { -+ return jsonBody; -+ } -+ } -+} -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/RequestAccess.java b/instrumentation/aws-sdk/aws-sdk-1.11/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/RequestAccess.java -index c212a69678..82a7185abe 100644 ---- a/instrumentation/aws-sdk/aws-sdk-1.11/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/RequestAccess.java -+++ 
b/instrumentation/aws-sdk/aws-sdk-1.11/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/RequestAccess.java -@@ -8,6 +8,12 @@ package io.opentelemetry.instrumentation.awssdk.v1_11; - import java.lang.invoke.MethodHandle; - import java.lang.invoke.MethodHandles; - import java.lang.invoke.MethodType; -+import java.lang.reflect.Method; -+import java.nio.ByteBuffer; -+import java.nio.charset.StandardCharsets; -+import java.util.Arrays; -+import java.util.Objects; -+import java.util.stream.Stream; - import javax.annotation.Nullable; - - final class RequestAccess { -@@ -20,48 +26,417 @@ final class RequestAccess { - } - }; - -+ @Nullable -+ private static BedrockJsonParser.LlmJson parseTargetBody(ByteBuffer buffer) { -+ try { -+ byte[] bytes; -+ // Create duplicate to avoid mutating the original buffer position -+ ByteBuffer duplicate = buffer.duplicate(); -+ if (buffer.hasArray()) { -+ bytes = -+ Arrays.copyOfRange( -+ duplicate.array(), -+ duplicate.arrayOffset(), -+ duplicate.arrayOffset() + duplicate.remaining()); -+ } else { -+ bytes = new byte[duplicate.remaining()]; -+ duplicate.get(bytes); -+ } -+ String jsonString = new String(bytes, StandardCharsets.UTF_8); // Convert to String -+ return BedrockJsonParser.parse(jsonString); -+ } catch (RuntimeException e) { -+ return null; -+ } -+ } -+ -+ @Nullable -+ private static BedrockJsonParser.LlmJson getJsonBody(Object target) { -+ if (target == null) { -+ return null; -+ } -+ -+ RequestAccess access = REQUEST_ACCESSORS.get(target.getClass()); -+ ByteBuffer bodyBuffer = invokeOrNullGeneric(access.getBody, target, ByteBuffer.class); -+ if (bodyBuffer == null) { -+ return null; -+ } -+ -+ return parseTargetBody(bodyBuffer); -+ } -+ -+ @Nullable -+ private static String findFirstMatchingPath(BedrockJsonParser.LlmJson jsonBody, String...
paths) { -+ if (jsonBody == null) { -+ return null; -+ } -+ -+ return Stream.of(paths) -+ .map(path -> BedrockJsonParser.JsonPathResolver.resolvePath(jsonBody, path)) -+ .filter(Objects::nonNull) -+ .map(Object::toString) -+ .findFirst() -+ .orElse(null); -+ } -+ -+ @Nullable -+ private static String approximateTokenCount( -+ BedrockJsonParser.LlmJson jsonBody, String... textPaths) { -+ if (jsonBody == null) { -+ return null; -+ } -+ -+ return Stream.of(textPaths) -+ .map(path -> BedrockJsonParser.JsonPathResolver.resolvePath(jsonBody, path)) -+ .filter(value -> value instanceof String) -+ .map(value -> Integer.toString((int) Math.ceil(((String) value).length() / 6.0))) -+ .findFirst() -+ .orElse(null); -+ } -+ -+ // Model -> Path Mapping: -+ // Amazon Nova -> "/inferenceConfig/max_new_tokens" -+ // Amazon Titan -> "/textGenerationConfig/maxTokenCount" -+ // Anthropic Claude -> "/max_tokens" -+ // Cohere Command -> "/max_tokens" -+ // Cohere Command R -> "/max_tokens" -+ // AI21 Jamba -> "/max_tokens" -+ // Meta Llama -> "/max_gen_len" -+ // Mistral AI -> "/max_tokens" -+ @Nullable -+ static String getMaxTokens(Object target) { -+ BedrockJsonParser.LlmJson jsonBody = getJsonBody(target); -+ return findFirstMatchingPath( -+ jsonBody, -+ "/max_tokens", -+ "/max_gen_len", -+ "/textGenerationConfig/maxTokenCount", -+ "/inferenceConfig/max_new_tokens"); -+ } -+ -+ // Model -> Path Mapping: -+ // Amazon Nova -> "/inferenceConfig/temperature" -+ // Amazon Titan -> "/textGenerationConfig/temperature" -+ // Anthropic Claude -> "/temperature" -+ // Cohere Command -> "/temperature" -+ // Cohere Command R -> "/temperature" -+ // AI21 Jamba -> "/temperature" -+ // Meta Llama -> "/temperature" -+ // Mistral AI -> "/temperature" -+ @Nullable -+ static String getTemperature(Object target) { -+ BedrockJsonParser.LlmJson jsonBody = getJsonBody(target); -+ return findFirstMatchingPath( -+ jsonBody, -+ "/temperature", -+ "/textGenerationConfig/temperature", -+ 
"inferenceConfig/temperature"); -+ } -+ -+ // Model -> Path Mapping: -+ // Amazon Nova -> "/inferenceConfig/top_p" -+ // Amazon Titan -> "/textGenerationConfig/topP" -+ // Anthropic Claude -> "/top_p" -+ // Cohere Command -> "/p" -+ // Cohere Command R -> "/p" -+ // AI21 Jamba -> "/top_p" -+ // Meta Llama -> "/top_p" -+ // Mistral AI -> "/top_p" -+ @Nullable -+ static String getTopP(Object target) { -+ BedrockJsonParser.LlmJson jsonBody = getJsonBody(target); -+ return findFirstMatchingPath( -+ jsonBody, "/top_p", "/p", "/textGenerationConfig/topP", "/inferenceConfig/top_p"); -+ } -+ -+ // Model -> Path Mapping: -+ // Amazon Nova -> "/usage/inputTokens" -+ // Amazon Titan -> "/inputTextTokenCount" -+ // Anthropic Claude -> "/usage/input_tokens" -+ // Cohere Command -> "/prompt" -+ // Cohere Command R -> "/message" -+ // AI21 Jamba -> "/usage/prompt_tokens" -+ // Meta Llama -> "/prompt_token_count" -+ // Mistral AI -> "/prompt" -+ @Nullable -+ static String getInputTokens(Object target) { -+ BedrockJsonParser.LlmJson jsonBody = getJsonBody(target); -+ if (jsonBody == null) { -+ return null; -+ } -+ -+ // Try direct token counts first -+ String directCount = -+ findFirstMatchingPath( -+ jsonBody, -+ "/inputTextTokenCount", -+ "/prompt_token_count", -+ "/usage/input_tokens", -+ "/usage/prompt_tokens", -+ "/usage/inputTokens"); -+ -+ if (directCount != null && !directCount.equals("null")) { -+ return directCount; -+ } -+ -+ // Fall back to token approximation -+ return approximateTokenCount(jsonBody, "/prompt", "/message"); -+ } -+ -+ // Model -> Path Mapping: -+ // Amazon Nova -> "/usage/outputTokens" -+ // Amazon Titan -> "/results/0/tokenCount" -+ // Anthropic Claude -> "/usage/output_tokens" -+ // Cohere Command -> "/generations/0/text" -+ // Cohere Command R -> "/text" -+ // AI21 Jamba -> "/usage/completion_tokens" -+ // Meta Llama -> "/generation_token_count" -+ // Mistral AI -> "/outputs/0/text" -+ @Nullable -+ static String getOutputTokens(Object target) { -+ 
BedrockJsonParser.LlmJson jsonBody = getJsonBody(target); -+ if (jsonBody == null) { -+ return null; -+ } -+ -+ // Try direct token counts first -+ String directCount = -+ findFirstMatchingPath( -+ jsonBody, -+ "/generation_token_count", -+ "/results/0/tokenCount", -+ "/usage/output_tokens", -+ "/usage/completion_tokens", -+ "/usage/outputTokens"); -+ -+ if (directCount != null && !directCount.equals("null")) { -+ return directCount; -+ } -+ -+ // Fall back to token approximation -+ return approximateTokenCount(jsonBody, "/text", "/outputs/0/text"); -+ } -+ -+ // Model -> Path Mapping: -+ // Amazon Nova -> "/stopReason" -+ // Amazon Titan -> "/results/0/completionReason" -+ // Anthropic Claude -> "/stop_reason" -+ // Cohere Command -> "/generations/0/finish_reason" -+ // Cohere Command R -> "/finish_reason" -+ // AI21 Jamba -> "/choices/0/finish_reason" -+ // Meta Llama -> "/stop_reason" -+ // Mistral AI -> "/outputs/0/stop_reason" -+ @Nullable -+ static String getFinishReasons(Object target) { -+ BedrockJsonParser.LlmJson jsonBody = getJsonBody(target); -+ String finishReason = -+ findFirstMatchingPath( -+ jsonBody, -+ "/stopReason", -+ "/finish_reason", -+ "/stop_reason", -+ "/results/0/completionReason", -+ "/generations/0/finish_reason", -+ "/choices/0/finish_reason", -+ "/outputs/0/stop_reason"); -+ -+ return finishReason != null ? 
"[" + finishReason + "]" : null; -+ } -+ -+ @Nullable -+ static String getLambdaName(Object request) { -+ if (request == null) { -+ return null; -+ } -+ RequestAccess access = REQUEST_ACCESSORS.get(request.getClass()); -+ return invokeOrNull(access.getLambdaName, request); -+ } -+ -+ @Nullable -+ static String getLambdaArn(Object request) { -+ if (request == null) { -+ return null; -+ } -+ return findNestedAccessorOrNull(request, "getConfiguration", "getFunctionArn"); -+ } -+ -+ @Nullable -+ static String getLambdaResourceId(Object request) { -+ if (request == null) { -+ return null; -+ } -+ RequestAccess access = REQUEST_ACCESSORS.get(request.getClass()); -+ return invokeOrNull(access.getLambdaResourceId, request); -+ } -+ -+ @Nullable -+ static String getSecretArn(Object request) { -+ if (request == null) { -+ return null; -+ } -+ RequestAccess access = REQUEST_ACCESSORS.get(request.getClass()); -+ return invokeOrNull(access.getSecretArn, request); -+ } -+ -+ @Nullable -+ static String getSnsTopicArn(Object request) { -+ if (request == null) { -+ return null; -+ } -+ RequestAccess access = REQUEST_ACCESSORS.get(request.getClass()); -+ return invokeOrNull(access.getSnsTopicArn, request); -+ } -+ -+ @Nullable -+ static String getStepFunctionsActivityArn(Object request) { -+ if (request == null) { -+ return null; -+ } -+ RequestAccess access = REQUEST_ACCESSORS.get(request.getClass()); -+ return invokeOrNull(access.getStepFunctionsActivityArn, request); -+ } -+ -+ @Nullable -+ static String getStateMachineArn(Object request) { -+ if (request == null) { -+ return null; -+ } -+ RequestAccess access = REQUEST_ACCESSORS.get(request.getClass()); -+ return invokeOrNull(access.getStateMachineArn, request); -+ } -+ - @Nullable - static String getBucketName(Object request) { -+ if (request == null) { -+ return null; -+ } - RequestAccess access = REQUEST_ACCESSORS.get(request.getClass()); - return invokeOrNull(access.getBucketName, request); - } - - @Nullable - static String 
getQueueUrl(Object request) { -+ if (request == null) { -+ return null; -+ } - RequestAccess access = REQUEST_ACCESSORS.get(request.getClass()); - return invokeOrNull(access.getQueueUrl, request); - } - - @Nullable - static String getQueueName(Object request) { -+ if (request == null) { -+ return null; -+ } - RequestAccess access = REQUEST_ACCESSORS.get(request.getClass()); - return invokeOrNull(access.getQueueName, request); - } - - @Nullable - static String getStreamName(Object request) { -+ if (request == null) { -+ return null; -+ } - RequestAccess access = REQUEST_ACCESSORS.get(request.getClass()); - return invokeOrNull(access.getStreamName, request); - } - - @Nullable - static String getTableName(Object request) { -+ if (request == null) { -+ return null; -+ } - RequestAccess access = REQUEST_ACCESSORS.get(request.getClass()); - return invokeOrNull(access.getTableName, request); - } - -+ @Nullable -+ static String getTableArn(Object request) { -+ if (request == null) { -+ return null; -+ } -+ return findNestedAccessorOrNull(request, "getTable", "getTableArn"); -+ } -+ -+ @Nullable -+ static String getStreamArn(Object request) { -+ if (request == null) { -+ return null; -+ } -+ RequestAccess access = REQUEST_ACCESSORS.get(request.getClass()); -+ return invokeOrNull(access.getStreamArn, request); -+ } -+ - @Nullable - static String getTopicArn(Object request) { -+ if (request == null) { -+ return null; -+ } - RequestAccess access = REQUEST_ACCESSORS.get(request.getClass()); - return invokeOrNull(access.getTopicArn, request); - } - - @Nullable - static String getTargetArn(Object request) { -+ if (request == null) { -+ return null; -+ } - RequestAccess access = REQUEST_ACCESSORS.get(request.getClass()); - return invokeOrNull(access.getTargetArn, request); - } - -+ @Nullable -+ static String getAgentId(Object request) { -+ if (request == null) { -+ return null; -+ } -+ RequestAccess access = REQUEST_ACCESSORS.get(request.getClass()); -+ return 
invokeOrNull(access.getAgentId, request); -+ } -+ -+ @Nullable -+ static String getKnowledgeBaseId(Object request) { -+ if (request == null) { -+ return null; -+ } -+ RequestAccess access = REQUEST_ACCESSORS.get(request.getClass()); -+ return invokeOrNull(access.getKnowledgeBaseId, request); -+ } -+ -+ @Nullable -+ static String getDataSourceId(Object request) { -+ if (request == null) { -+ return null; -+ } -+ RequestAccess access = REQUEST_ACCESSORS.get(request.getClass()); -+ return invokeOrNull(access.getDataSourceId, request); -+ } -+ -+ @Nullable -+ static String getGuardrailId(Object request) { -+ if (request == null) { -+ return null; -+ } -+ RequestAccess access = REQUEST_ACCESSORS.get(request.getClass()); -+ return invokeOrNull(access.getGuardrailId, request); -+ } -+ -+ @Nullable -+ static String getGuardrailArn(Object request) { -+ if (request == null) { -+ return null; -+ } -+ return findNestedAccessorOrNull(request, "getGuardrailArn"); -+ } -+ -+ @Nullable -+ static String getModelId(Object request) { -+ if (request == null) { -+ return null; -+ } -+ RequestAccess access = REQUEST_ACCESSORS.get(request.getClass()); -+ return invokeOrNull(access.getModelId, request); -+ } -+ - @Nullable - private static String invokeOrNull(@Nullable MethodHandle method, Object obj) { - if (method == null) { -@@ -74,31 +449,88 @@ final class RequestAccess { - } - } - -+ @Nullable -+ private static T invokeOrNullGeneric( -+ @Nullable MethodHandle method, Object obj, Class returnType) { -+ if (method == null) { -+ return null; -+ } -+ try { -+ return returnType.cast(method.invoke(obj)); -+ } catch (Throwable e) { -+ return null; -+ } -+ } -+ - @Nullable private final MethodHandle getBucketName; - @Nullable private final MethodHandle getQueueUrl; - @Nullable private final MethodHandle getQueueName; - @Nullable private final MethodHandle getStreamName; -+ @Nullable private final MethodHandle getStreamArn; - @Nullable private final MethodHandle getTableName; - @Nullable 
private final MethodHandle getTopicArn; - @Nullable private final MethodHandle getTargetArn; -+ @Nullable private final MethodHandle getAgentId; -+ @Nullable private final MethodHandle getKnowledgeBaseId; -+ @Nullable private final MethodHandle getDataSourceId; -+ @Nullable private final MethodHandle getGuardrailId; -+ @Nullable private final MethodHandle getModelId; -+ @Nullable private final MethodHandle getBody; -+ @Nullable private final MethodHandle getStateMachineArn; -+ @Nullable private final MethodHandle getStepFunctionsActivityArn; -+ @Nullable private final MethodHandle getSnsTopicArn; -+ @Nullable private final MethodHandle getSecretArn; -+ @Nullable private final MethodHandle getLambdaName; -+ @Nullable private final MethodHandle getLambdaResourceId; - - private RequestAccess(Class clz) { -- getBucketName = findAccessorOrNull(clz, "getBucketName"); -- getQueueUrl = findAccessorOrNull(clz, "getQueueUrl"); -- getQueueName = findAccessorOrNull(clz, "getQueueName"); -- getStreamName = findAccessorOrNull(clz, "getStreamName"); -- getTableName = findAccessorOrNull(clz, "getTableName"); -- getTopicArn = findAccessorOrNull(clz, "getTopicArn"); -- getTargetArn = findAccessorOrNull(clz, "getTargetArn"); -+ getBucketName = findAccessorOrNull(clz, "getBucketName", String.class); -+ getQueueUrl = findAccessorOrNull(clz, "getQueueUrl", String.class); -+ getQueueName = findAccessorOrNull(clz, "getQueueName", String.class); -+ getStreamName = findAccessorOrNull(clz, "getStreamName", String.class); -+ getStreamArn = findAccessorOrNull(clz, "getStreamARN", String.class); -+ getTableName = findAccessorOrNull(clz, "getTableName", String.class); -+ getTopicArn = findAccessorOrNull(clz, "getTopicArn", String.class); -+ getTargetArn = findAccessorOrNull(clz, "getTargetArn", String.class); -+ getAgentId = findAccessorOrNull(clz, "getAgentId", String.class); -+ getKnowledgeBaseId = findAccessorOrNull(clz, "getKnowledgeBaseId", String.class); -+ getDataSourceId = 
findAccessorOrNull(clz, "getDataSourceId", String.class); -+ getGuardrailId = findAccessorOrNull(clz, "getGuardrailId", String.class); -+ getModelId = findAccessorOrNull(clz, "getModelId", String.class); -+ getBody = findAccessorOrNull(clz, "getBody", ByteBuffer.class); -+ getStateMachineArn = findAccessorOrNull(clz, "getStateMachineArn", String.class); -+ getStepFunctionsActivityArn = findAccessorOrNull(clz, "getActivityArn", String.class); -+ getSnsTopicArn = findAccessorOrNull(clz, "getTopicArn", String.class); -+ getSecretArn = findAccessorOrNull(clz, "getARN", String.class); -+ getLambdaName = findAccessorOrNull(clz, "getFunctionName", String.class); -+ getLambdaResourceId = findAccessorOrNull(clz, "getUUID", String.class); - } - - @Nullable -- private static MethodHandle findAccessorOrNull(Class clz, String methodName) { -+ private static MethodHandle findAccessorOrNull( -+ Class clz, String methodName, Class returnType) { - try { - return MethodHandles.publicLookup() -- .findVirtual(clz, methodName, MethodType.methodType(String.class)); -+ .findVirtual(clz, methodName, MethodType.methodType(returnType)); - } catch (Throwable t) { - return null; - } - } -+ -+ @Nullable -+ private static String findNestedAccessorOrNull(Object obj, String... methodNames) { -+ Object current = obj; -+ for (String methodName : methodNames) { -+ if (current == null) { -+ return null; -+ } -+ try { -+ Method method = current.getClass().getMethod(methodName); -+ current = method.invoke(current); -+ } catch (Exception e) { -+ return null; -+ } -+ } -+ return (current instanceof String) ? 
(String) current : null; -+ } - } -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/library/src/test/groovy/io/opentelemetry/instrumentation/awssdk/v1_11/BedrockJsonParserTest.groovy b/instrumentation/aws-sdk/aws-sdk-1.11/library/src/test/groovy/io/opentelemetry/instrumentation/awssdk/v1_11/BedrockJsonParserTest.groovy -new file mode 100644 -index 0000000000..03563b1d5b ---- /dev/null -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/library/src/test/groovy/io/opentelemetry/instrumentation/awssdk/v1_11/BedrockJsonParserTest.groovy -@@ -0,0 +1,107 @@ -+/* -+ * Copyright The OpenTelemetry Authors -+ * SPDX-License-Identifier: Apache-2.0 -+ */ -+ -+package io.opentelemetry.instrumentation.awssdk.v1_11 -+ -+import spock.lang.Specification -+ -+class BedrockJsonParserTest extends Specification { -+ def "should parse simple JSON object"() { -+ given: -+ String json = '{"key":"value"}' -+ -+ when: -+ def parsedJson = BedrockJsonParser.parse(json) -+ -+ then: -+ parsedJson.getJsonBody() == [key: "value"] -+ } -+ -+ def "should parse nested JSON object"() { -+ given: -+ String json = '{"parent":{"child":"value"}}' -+ -+ when: -+ def parsedJson = BedrockJsonParser.parse(json) -+ -+ then: -+ def parent = parsedJson.getJsonBody().get("parent") -+ parent instanceof Map -+ parent["child"] == "value" -+ } -+ -+ def "should parse JSON array"() { -+ given: -+ String json = '{"array":[1, "two", 1.0]}' -+ -+ when: -+ def parsedJson = BedrockJsonParser.parse(json) -+ -+ then: -+ def array = parsedJson.getJsonBody().get("array") -+ array instanceof List -+ array == [1, "two", 1.0] -+ } -+ -+ def "should parse escape sequences"() { -+ given: -+ String json = '{"escaped":"Line1\\nLine2\\tTabbed\\\"Quoted\\\"\\bBackspace\\fFormfeed\\rCarriageReturn\\\\Backslash\\/Slash\\u0041"}' -+ -+ when: -+ def parsedJson = BedrockJsonParser.parse(json) -+ -+ then: -+ parsedJson.getJsonBody().get("escaped") == -+ "Line1\nLine2\tTabbed\"Quoted\"\bBackspace\fFormfeed\rCarriageReturn\\Backslash/SlashA" -+ } -+ 
-+ def "should throw exception for malformed JSON"() { -+ given: -+ String malformedJson = '{"key":value}' -+ -+ when: -+ BedrockJsonParser.parse(malformedJson) -+ -+ then: -+ def ex = thrown(IllegalArgumentException) -+ ex.message.contains("Unexpected character") -+ } -+ -+ def "should resolve path in JSON object"() { -+ given: -+ String json = '{"parent":{"child":{"key":"value"}}}' -+ -+ when: -+ def parsedJson = BedrockJsonParser.parse(json) -+ def resolvedValue = BedrockJsonParser.JsonPathResolver.resolvePath(parsedJson, "/parent/child/key") -+ -+ then: -+ resolvedValue == "value" -+ } -+ -+ def "should resolve path in JSON array"() { -+ given: -+ String json = '{"array":[{"key":"value1"}, {"key":"value2"}]}' -+ -+ when: -+ def parsedJson = BedrockJsonParser.parse(json) -+ def resolvedValue = BedrockJsonParser.JsonPathResolver.resolvePath(parsedJson, "/array/1/key") -+ -+ then: -+ resolvedValue == "value2" -+ } -+ -+ def "should return null for invalid path resolution"() { -+ given: -+ String json = '{"parent":{"child":{"key":"value"}}}' -+ -+ when: -+ def parsedJson = BedrockJsonParser.parse(json) -+ def resolvedValue = BedrockJsonParser.JsonPathResolver.resolvePath(parsedJson, "/invalid/path") -+ -+ then: -+ resolvedValue == null -+ } -+} -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/testing/build.gradle.kts b/instrumentation/aws-sdk/aws-sdk-1.11/testing/build.gradle.kts -index 545f5dffce..227a205ebd 100644 ---- a/instrumentation/aws-sdk/aws-sdk-1.11/testing/build.gradle.kts -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/testing/build.gradle.kts -@@ -14,6 +14,14 @@ dependencies { - compileOnly("com.amazonaws:aws-java-sdk-dynamodb:1.11.106") - compileOnly("com.amazonaws:aws-java-sdk-sns:1.11.106") - compileOnly("com.amazonaws:aws-java-sdk-sqs:1.11.106") -+ compileOnly("com.amazonaws:aws-java-sdk-secretsmanager:1.11.309") -+ compileOnly("com.amazonaws:aws-java-sdk-stepfunctions:1.11.230") -+ compileOnly("com.amazonaws:aws-java-sdk-lambda:1.11.678") -+ -+ 
compileOnly("com.amazonaws:aws-java-sdk-bedrock:1.12.744") -+ compileOnly("com.amazonaws:aws-java-sdk-bedrockagent:1.12.744") -+ compileOnly("com.amazonaws:aws-java-sdk-bedrockagentruntime:1.12.744") -+ compileOnly("com.amazonaws:aws-java-sdk-bedrockruntime:1.12.744") - - // needed for SQS - using emq directly as localstack references emq v0.15.7 ie WITHOUT AWS trace header propagation - implementation("org.elasticmq:elasticmq-rest-sqs_2.13") -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractBedrockAgentClientTest.java b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractBedrockAgentClientTest.java -new file mode 100644 -index 0000000000..a5e5a63b09 ---- /dev/null -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractBedrockAgentClientTest.java -@@ -0,0 +1,95 @@ -+/* -+ * Copyright The OpenTelemetry Authors -+ * SPDX-License-Identifier: Apache-2.0 -+ */ -+ -+package io.opentelemetry.instrumentation.awssdk.v1_11; -+ -+import static io.opentelemetry.api.common.AttributeKey.stringKey; -+import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo; -+import static java.util.Collections.singletonList; -+ -+import com.amazonaws.services.bedrockagent.AWSBedrockAgent; -+import com.amazonaws.services.bedrockagent.AWSBedrockAgentClientBuilder; -+import com.amazonaws.services.bedrockagent.model.GetAgentRequest; -+import com.amazonaws.services.bedrockagent.model.GetDataSourceRequest; -+import com.amazonaws.services.bedrockagent.model.GetKnowledgeBaseRequest; -+import io.opentelemetry.testing.internal.armeria.common.HttpResponse; -+import io.opentelemetry.testing.internal.armeria.common.HttpStatus; -+import io.opentelemetry.testing.internal.armeria.common.MediaType; -+import org.junit.jupiter.api.Test; -+ -+public abstract class 
AbstractBedrockAgentClientTest extends AbstractBaseAwsClientTest { -+ -+ public abstract AWSBedrockAgentClientBuilder configureClient(AWSBedrockAgentClientBuilder client); -+ -+ @Override -+ protected boolean hasRequestId() { -+ return true; -+ } -+ -+ @Test -+ public void sendGetAgentRequest() throws Exception { -+ AWSBedrockAgent client = createClient(); -+ -+ server.enqueue(HttpResponse.of(HttpStatus.OK, MediaType.JSON_UTF_8, "{}")); -+ -+ Object response = client.getAgent(new GetAgentRequest().withAgentId("agentId")); -+ -+ assertRequestWithMockedResponse( -+ response, -+ client, -+ "AWSBedrockAgent", -+ "GetAgent", -+ "GET", -+ singletonList(equalTo(stringKey("aws.bedrock.agent.id"), "agentId"))); -+ } -+ -+ @Test -+ public void sendGetKnowledgeBaseRequest() throws Exception { -+ AWSBedrockAgent client = createClient(); -+ -+ server.enqueue(HttpResponse.of(HttpStatus.OK, MediaType.JSON_UTF_8, "{}")); -+ -+ Object response = -+ client.getKnowledgeBase( -+ new GetKnowledgeBaseRequest().withKnowledgeBaseId("knowledgeBaseId")); -+ -+ assertRequestWithMockedResponse( -+ response, -+ client, -+ "AWSBedrockAgent", -+ "GetKnowledgeBase", -+ "GET", -+ singletonList(equalTo(stringKey("aws.bedrock.knowledge_base.id"), "knowledgeBaseId"))); -+ } -+ -+ @Test -+ public void sendGetDataSourceRequest() throws Exception { -+ AWSBedrockAgent client = createClient(); -+ -+ server.enqueue(HttpResponse.of(HttpStatus.OK, MediaType.JSON_UTF_8, "{}")); -+ -+ Object response = -+ client.getDataSource( -+ new GetDataSourceRequest() -+ .withDataSourceId("datasourceId") -+ .withKnowledgeBaseId("knowledgeBaseId")); -+ -+ assertRequestWithMockedResponse( -+ response, -+ client, -+ "AWSBedrockAgent", -+ "GetDataSource", -+ "GET", -+ singletonList(equalTo(stringKey("aws.bedrock.data_source.id"), "datasourceId"))); -+ } -+ -+ private AWSBedrockAgent createClient() { -+ AWSBedrockAgentClientBuilder clientBuilder = AWSBedrockAgentClientBuilder.standard(); -+ return 
configureClient(clientBuilder) -+ .withEndpointConfiguration(endpoint) -+ .withCredentials(credentialsProvider) -+ .build(); -+ } -+} -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractBedrockClientTest.java b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractBedrockClientTest.java -new file mode 100644 -index 0000000000..a97b893055 ---- /dev/null -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractBedrockClientTest.java -@@ -0,0 +1,79 @@ -+/* -+ * Copyright The OpenTelemetry Authors -+ * SPDX-License-Identifier: Apache-2.0 -+ */ -+ -+package io.opentelemetry.instrumentation.awssdk.v1_11; -+ -+import static io.opentelemetry.api.common.AttributeKey.stringKey; -+import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo; -+import static java.util.Collections.singletonList; -+ -+import com.amazonaws.services.bedrock.AmazonBedrock; -+import com.amazonaws.services.bedrock.AmazonBedrockClientBuilder; -+import com.amazonaws.services.bedrock.model.GetGuardrailRequest; -+import io.opentelemetry.testing.internal.armeria.common.HttpResponse; -+import io.opentelemetry.testing.internal.armeria.common.HttpStatus; -+import io.opentelemetry.testing.internal.armeria.common.MediaType; -+import org.junit.jupiter.api.Test; -+ -+public abstract class AbstractBedrockClientTest extends AbstractBaseAwsClientTest { -+ -+ public abstract AmazonBedrockClientBuilder configureClient(AmazonBedrockClientBuilder client); -+ -+ @Override -+ protected boolean hasRequestId() { -+ return true; -+ } -+ -+ @Test -+ public void sendRequestWithMockedResponse() throws Exception { -+ AmazonBedrockClientBuilder clientBuilder = AmazonBedrockClientBuilder.standard(); -+ AmazonBedrock client = -+ configureClient(clientBuilder) -+ .withEndpointConfiguration(endpoint) -+ 
.withCredentials(credentialsProvider) -+ .build(); -+ -+ String body = -+ "{" -+ + " \"blockedInputMessaging\": \"string\"," -+ + " \"blockedOutputsMessaging\": \"string\"," -+ + " \"contentPolicy\": {}," -+ + " \"createdAt\": \"2024-06-12T18:31:45Z\"," -+ + " \"description\": \"string\"," -+ + " \"guardrailArn\": \"guardrailArn\"," -+ + " \"guardrailId\": \"guardrailId\"," -+ + " \"kmsKeyArn\": \"string\"," -+ + " \"name\": \"string\"," -+ + " \"sensitiveInformationPolicy\": {}," -+ + " \"status\": \"READY\"," -+ + " \"topicPolicy\": {" -+ + " \"topics\": [" -+ + " {" -+ + " \"definition\": \"string\"," -+ + " \"examples\": [ \"string\" ]," -+ + " \"name\": \"string\"," -+ + " \"type\": \"string\"" -+ + " }" -+ + " ]" -+ + " }," -+ + " \"updatedAt\": \"2024-06-12T18:31:48Z\"," -+ + " \"version\": \"DRAFT\"," -+ + " \"wordPolicy\": {}" -+ + "}"; -+ -+ server.enqueue(HttpResponse.of(HttpStatus.OK, MediaType.JSON_UTF_8, body)); -+ -+ Object response = -+ client.getGuardrail(new GetGuardrailRequest().withGuardrailIdentifier("guardrailId")); -+ -+ assertRequestWithMockedResponse( -+ response, -+ client, -+ "Bedrock", -+ "GetGuardrail", -+ "GET", -+ singletonList(equalTo(stringKey("aws.bedrock.guardrail.id"), "guardrailId"))); -+ } -+} -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractBedrockRuntimeClientTest.java b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractBedrockRuntimeClientTest.java -new file mode 100644 -index 0000000000..98a5873614 ---- /dev/null -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractBedrockRuntimeClientTest.java -@@ -0,0 +1,135 @@ -+/* -+ * Copyright The OpenTelemetry Authors -+ * SPDX-License-Identifier: Apache-2.0 -+ */ -+ -+package io.opentelemetry.instrumentation.awssdk.v1_11; -+ -+import static 
io.opentelemetry.api.common.AttributeKey.stringKey; -+import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo; -+import static java.util.Arrays.asList; -+ -+import com.amazonaws.services.bedrockruntime.AmazonBedrockRuntime; -+import com.amazonaws.services.bedrockruntime.AmazonBedrockRuntimeClientBuilder; -+import com.amazonaws.services.bedrockruntime.model.InvokeModelRequest; -+import io.opentelemetry.sdk.testing.assertj.AttributeAssertion; -+import io.opentelemetry.testing.internal.armeria.common.HttpResponse; -+import io.opentelemetry.testing.internal.armeria.common.HttpStatus; -+import io.opentelemetry.testing.internal.armeria.common.MediaType; -+import java.nio.charset.StandardCharsets; -+import java.util.List; -+import java.util.stream.Stream; -+import org.junit.jupiter.params.ParameterizedTest; -+import org.junit.jupiter.params.provider.MethodSource; -+ -+public abstract class AbstractBedrockRuntimeClientTest extends AbstractBaseAwsClientTest { -+ -+ public abstract AmazonBedrockRuntimeClientBuilder configureClient( -+ AmazonBedrockRuntimeClientBuilder client); -+ -+ @Override -+ protected boolean hasRequestId() { -+ return true; -+ } -+ -+ @ParameterizedTest -+ @MethodSource("testData") -+ public void sendRequestWithMockedResponse( -+ String modelId, -+ String requestBody, -+ String expectedResponse, -+ List expectedAttributes) -+ throws Exception { -+ AmazonBedrockRuntimeClientBuilder clientBuilder = AmazonBedrockRuntimeClientBuilder.standard(); -+ AmazonBedrockRuntime client = -+ configureClient(clientBuilder) -+ .withEndpointConfiguration(endpoint) -+ .withCredentials(credentialsProvider) -+ .build(); -+ -+ server.enqueue(HttpResponse.of(HttpStatus.OK, MediaType.JSON_UTF_8, expectedResponse)); -+ -+ client.invokeModel( -+ new InvokeModelRequest() -+ .withModelId(modelId) -+ .withBody(StandardCharsets.UTF_8.encode(requestBody))); -+ -+ assertRequestWithMockedResponse( -+ expectedResponse, client, "BedrockRuntime", "InvokeModel", 
"POST", expectedAttributes); -+ } -+ -+ private static Stream testData() { -+ return Stream.of( -+ new Object[] { -+ "ai21.jamba-1-5-mini-v1:0", -+ "{\"messages\":[{\"role\":\"user\",\"message\":\"Which LLM are you?\"}],\"max_tokens\":1000,\"top_p\":0.8,\"temperature\":0.7}", -+ "{\"choices\":[{\"finish_reason\":\"stop\"}],\"usage\":{\"prompt_tokens\":5,\"completion_tokens\":42}}", -+ asList( -+ equalTo(stringKey("gen_ai.request.model"), "ai21.jamba-1-5-mini-v1:0"), -+ equalTo(stringKey("gen_ai.system"), "aws.bedrock"), -+ equalTo(stringKey("gen_ai.request.max_tokens"), "1000"), -+ equalTo(stringKey("gen_ai.request.temperature"), "0.7"), -+ equalTo(stringKey("gen_ai.request.top_p"), "0.8"), -+ equalTo(stringKey("gen_ai.response.finish_reasons"), "[stop]"), -+ equalTo(stringKey("gen_ai.usage.input_tokens"), "5"), -+ equalTo(stringKey("gen_ai.usage.output_tokens"), "42")) -+ }, -+ new Object[] { -+ "amazon.titan-text-premier-v1:0", -+ "{\"inputText\":\"Hello, world!\",\"textGenerationConfig\":{\"temperature\":0.7,\"topP\":0.9,\"maxTokenCount\":100,\"stopSequences\":[\"END\"]}}", -+ "{\"inputTextTokenCount\":5,\"results\":[{\"tokenCount\":42,\"outputText\":\"Hi! 
I'm Titan, an AI assistant.\",\"completionReason\":\"stop\"}]}", -+ asList( -+ equalTo(stringKey("gen_ai.request.model"), "amazon.titan-text-premier-v1:0"), -+ equalTo(stringKey("gen_ai.system"), "aws.bedrock"), -+ equalTo(stringKey("gen_ai.request.max_tokens"), "100"), -+ equalTo(stringKey("gen_ai.request.temperature"), "0.7"), -+ equalTo(stringKey("gen_ai.request.top_p"), "0.9"), -+ equalTo(stringKey("gen_ai.response.finish_reasons"), "[stop]"), -+ equalTo(stringKey("gen_ai.usage.input_tokens"), "5"), -+ equalTo(stringKey("gen_ai.usage.output_tokens"), "42")) -+ }, -+ new Object[] { -+ "anthropic.claude-3-5-sonnet-20241022-v2:0", -+ "{\"anthropic_version\":\"bedrock-2023-05-31\",\"messages\":[{\"role\":\"user\",\"content\":\"Hello, world\"}],\"max_tokens\":100,\"temperature\":0.7,\"top_p\":0.9}", -+ "{\"stop_reason\":\"end_turn\",\"usage\":{\"input_tokens\":2095,\"output_tokens\":503}}", -+ asList( -+ equalTo( -+ stringKey("gen_ai.request.model"), "anthropic.claude-3-5-sonnet-20241022-v2:0"), -+ equalTo(stringKey("gen_ai.system"), "aws.bedrock"), -+ equalTo(stringKey("gen_ai.request.max_tokens"), "100"), -+ equalTo(stringKey("gen_ai.request.temperature"), "0.7"), -+ equalTo(stringKey("gen_ai.request.top_p"), "0.9"), -+ equalTo(stringKey("gen_ai.response.finish_reasons"), "[end_turn]"), -+ equalTo(stringKey("gen_ai.usage.input_tokens"), "2095"), -+ equalTo(stringKey("gen_ai.usage.output_tokens"), "503")) -+ }, -+ new Object[] { -+ "meta.llama3-70b-instruct-v1:0", -+ "{\"prompt\":\"<|begin_of_text|><|start_header_id|>user<|end_header_id|>\\\\nDescribe the purpose of a 'hello world' program in one line. 
<|eot_id|>\\\\n<|start_header_id|>assistant<|end_header_id|>\\\\n\",\"max_gen_len\":128,\"temperature\":0.1,\"top_p\":0.9}", -+ "{\"prompt_token_count\":2095,\"generation_token_count\":503,\"stop_reason\":\"stop\"}", -+ asList( -+ equalTo(stringKey("gen_ai.request.model"), "meta.llama3-70b-instruct-v1:0"), -+ equalTo(stringKey("gen_ai.system"), "aws.bedrock"), -+ equalTo(stringKey("gen_ai.request.max_tokens"), "128"), -+ equalTo(stringKey("gen_ai.request.temperature"), "0.1"), -+ equalTo(stringKey("gen_ai.request.top_p"), "0.9"), -+ equalTo(stringKey("gen_ai.response.finish_reasons"), "[stop]"), -+ equalTo(stringKey("gen_ai.usage.input_tokens"), "2095"), -+ equalTo(stringKey("gen_ai.usage.output_tokens"), "503")) -+ }, -+ new Object[] { -+ "cohere.command-r-v1:0", -+ "{\"message\":\"Convince me to write a LISP interpreter in one line.\",\"temperature\":0.8,\"max_tokens\":4096,\"p\":0.45}", -+ "{\"text\":\"test-output\",\"finish_reason\":\"COMPLETE\"}", -+ asList( -+ equalTo(stringKey("gen_ai.request.model"), "cohere.command-r-v1:0"), -+ equalTo(stringKey("gen_ai.system"), "aws.bedrock"), -+ equalTo(stringKey("gen_ai.request.max_tokens"), "4096"), -+ equalTo(stringKey("gen_ai.request.temperature"), "0.8"), -+ equalTo(stringKey("gen_ai.request.top_p"), "0.45"), -+ equalTo(stringKey("gen_ai.response.finish_reasons"), "[COMPLETE]"), -+ equalTo(stringKey("gen_ai.usage.input_tokens"), "9"), -+ equalTo(stringKey("gen_ai.usage.output_tokens"), "2")) -+ }); -+ } -+} -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractDynamoDbClientTest.java b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractDynamoDbClientTest.java -index 441a4a3a0b..529e317a65 100644 ---- a/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractDynamoDbClientTest.java -+++ 
b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractDynamoDbClientTest.java -@@ -11,10 +11,12 @@ import static io.opentelemetry.semconv.incubating.AwsIncubatingAttributes.AWS_DY - import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_SYSTEM; - import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DbSystemIncubatingValues.DYNAMODB; - import static java.util.Collections.singletonList; -+import static org.junit.Assert.assertEquals; - - import com.amazonaws.services.dynamodbv2.AmazonDynamoDB; - import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder; - import com.amazonaws.services.dynamodbv2.model.CreateTableRequest; -+import com.amazonaws.services.dynamodbv2.model.DescribeTableRequest; - import io.opentelemetry.sdk.testing.assertj.AttributeAssertion; - import io.opentelemetry.testing.internal.armeria.common.HttpResponse; - import io.opentelemetry.testing.internal.armeria.common.HttpStatus; -@@ -53,4 +55,39 @@ public abstract class AbstractDynamoDbClientTest extends AbstractBaseAwsClientTe - assertRequestWithMockedResponse( - response, client, "DynamoDBv2", "CreateTable", "POST", additionalAttributes); - } -+ -+ @Test -+ public void testGetTableArnWithMockedResponse() { -+ AmazonDynamoDBClientBuilder clientBuilder = AmazonDynamoDBClientBuilder.standard(); -+ AmazonDynamoDB client = -+ configureClient(clientBuilder) -+ .withEndpointConfiguration(endpoint) -+ .withCredentials(credentialsProvider) -+ .build(); -+ -+ String tableName = "MockTable"; -+ String expectedArn = "arn:aws:dynamodb:us-west-2:123456789012:table/" + tableName; -+ -+ String body = -+ "{\n" -+ + "\"Table\": {\n" -+ + "\"TableName\": \"" -+ + tableName -+ + "\",\n" -+ + "\"TableArn\": \"" -+ + expectedArn -+ + "\"\n" -+ + "}\n" -+ + "}"; -+ -+ server.enqueue(HttpResponse.of(HttpStatus.OK, MediaType.JSON_UTF_8, body)); -+ -+ String actualArn = -+ client -+ .describeTable(new 
DescribeTableRequest().withTableName(tableName)) -+ .getTable() -+ .getTableArn(); -+ -+ assertEquals("Table ARN should match expected value", expectedArn, actualArn); -+ } - } -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractKinesisClientTest.java b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractKinesisClientTest.java -index ee6d1b7501..a21b1ebefa 100644 ---- a/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractKinesisClientTest.java -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractKinesisClientTest.java -@@ -12,13 +12,16 @@ import static java.util.Collections.singletonList; - import com.amazonaws.services.kinesis.AmazonKinesis; - import com.amazonaws.services.kinesis.AmazonKinesisClientBuilder; - import com.amazonaws.services.kinesis.model.DeleteStreamRequest; -+import com.amazonaws.services.kinesis.model.DescribeStreamRequest; - import io.opentelemetry.sdk.testing.assertj.AttributeAssertion; - import io.opentelemetry.testing.internal.armeria.common.HttpResponse; - import io.opentelemetry.testing.internal.armeria.common.HttpStatus; - import io.opentelemetry.testing.internal.armeria.common.MediaType; -+import java.util.Arrays; - import java.util.List; - import java.util.function.Function; - import java.util.stream.Stream; -+import org.junit.Test; - import org.junit.jupiter.params.ParameterizedTest; - import org.junit.jupiter.params.provider.Arguments; - import org.junit.jupiter.params.provider.MethodSource; -@@ -54,6 +57,41 @@ public abstract class AbstractKinesisClientTest extends AbstractBaseAwsClientTes - response, client, "Kinesis", operation, "POST", additionalAttributes); - } - -+ @Test -+ public void sendRequestWithStreamArnMockedResponse() throws Exception { -+ 
AmazonKinesisClientBuilder clientBuilder = AmazonKinesisClientBuilder.standard(); -+ AmazonKinesis client = -+ configureClient(clientBuilder) -+ .withEndpointConfiguration(endpoint) -+ .withCredentials(credentialsProvider) -+ .build(); -+ -+ String body = -+ "{\n" -+ + "\"StreamDescription\": {\n" -+ + "\"StreamARN\": \"arn:aws:kinesis:us-east-1:123456789012:stream/somestream\",\n" -+ + "\"StreamName\": \"somestream\",\n" -+ + "\"StreamStatus\": \"ACTIVE\",\n" -+ + "\"Shards\": []\n" -+ + "}\n" -+ + "}"; -+ -+ server.enqueue(HttpResponse.of(HttpStatus.OK, MediaType.JSON_UTF_8, body)); -+ -+ List additionalAttributes = -+ Arrays.asList( -+ equalTo(stringKey("aws.stream.name"), "somestream"), -+ equalTo( -+ stringKey("aws.stream.arn"), -+ "arn:aws:kinesis:us-east-1:123456789012:stream/somestream")); -+ -+ Object response = -+ client.describeStream(new DescribeStreamRequest().withStreamName("somestream")); -+ -+ assertRequestWithMockedResponse( -+ response, client, "Kinesis", "DescribeStream", "POST", additionalAttributes); -+ } -+ - private static Stream provideArguments() { - return Stream.of( - Arguments.of( -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractLambdaClientTest.java b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractLambdaClientTest.java -new file mode 100644 -index 0000000000..9f5a245ee7 ---- /dev/null -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractLambdaClientTest.java -@@ -0,0 +1,72 @@ -+/* -+ * Copyright The OpenTelemetry Authors -+ * SPDX-License-Identifier: Apache-2.0 -+ */ -+ -+package io.opentelemetry.instrumentation.awssdk.v1_11; -+ -+import static io.opentelemetry.api.common.AttributeKey.stringKey; -+import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo; -+import static 
java.util.Collections.singletonList; -+ -+import com.amazonaws.services.lambda.AWSLambda; -+import com.amazonaws.services.lambda.AWSLambdaClientBuilder; -+import com.amazonaws.services.lambda.model.GetEventSourceMappingRequest; -+import com.amazonaws.services.lambda.model.GetFunctionRequest; -+import io.opentelemetry.sdk.testing.assertj.AttributeAssertion; -+import io.opentelemetry.testing.internal.armeria.common.HttpResponse; -+import io.opentelemetry.testing.internal.armeria.common.HttpStatus; -+import io.opentelemetry.testing.internal.armeria.common.MediaType; -+import java.util.List; -+import java.util.function.Function; -+import java.util.stream.Stream; -+import org.junit.jupiter.params.ParameterizedTest; -+import org.junit.jupiter.params.provider.Arguments; -+import org.junit.jupiter.params.provider.MethodSource; -+ -+public abstract class AbstractLambdaClientTest extends AbstractBaseAwsClientTest { -+ -+ public abstract AWSLambdaClientBuilder configureClient(AWSLambdaClientBuilder client); -+ -+ @Override -+ protected boolean hasRequestId() { -+ return false; -+ } -+ -+ @ParameterizedTest -+ @MethodSource("provideArguments") -+ public void testSendRequestWithMockedResponse( -+ String operation, -+ List additionalAttributes, -+ Function call) -+ throws Exception { -+ -+ AWSLambdaClientBuilder clientBuilder = AWSLambdaClientBuilder.standard(); -+ -+ AWSLambda client = -+ configureClient(clientBuilder) -+ .withEndpointConfiguration(endpoint) -+ .withCredentials(credentialsProvider) -+ .build(); -+ -+ server.enqueue(HttpResponse.of(HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, "")); -+ -+ Object response = call.apply(client); -+ assertRequestWithMockedResponse( -+ response, client, "AWSLambda", operation, "GET", additionalAttributes); -+ } -+ -+ private static Stream provideArguments() { -+ return Stream.of( -+ Arguments.of( -+ "GetEventSourceMapping", -+ singletonList(equalTo(stringKey("aws.lambda.resource_mapping.id"), "uuid")), -+ (Function) -+ c -> 
c.getEventSourceMapping(new GetEventSourceMappingRequest().withUUID("uuid"))), -+ Arguments.of( -+ "GetFunction", -+ singletonList(equalTo(stringKey("aws.lambda.function.name"), "functionName")), -+ (Function) -+ c -> c.getFunction(new GetFunctionRequest().withFunctionName("functionName")))); -+ } -+} -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractS3ClientTest.java b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractS3ClientTest.java -index 574165992f..5248d050b6 100644 ---- a/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractS3ClientTest.java -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractS3ClientTest.java -@@ -175,6 +175,7 @@ public abstract class AbstractS3ClientTest extends AbstractBaseAwsClientTest { - equalTo(RPC_SYSTEM, "aws-api"), - equalTo(RPC_SERVICE, "Amazon S3"), - equalTo(RPC_METHOD, "GetObject"), -+ equalTo(stringKey("aws.auth.account.access_key"), "my-access-key"), - equalTo(stringKey("aws.endpoint"), server.httpUri().toString()), - equalTo(stringKey("aws.agent"), "java-aws-sdk"), - equalTo(stringKey("aws.bucket.name"), "someBucket"), -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractSecretsManagerClientTest.java b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractSecretsManagerClientTest.java -new file mode 100644 -index 0000000000..03de6fce3f ---- /dev/null -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractSecretsManagerClientTest.java -@@ -0,0 +1,62 @@ -+/* -+ * Copyright The OpenTelemetry Authors -+ * SPDX-License-Identifier: Apache-2.0 -+ */ -+ -+package 
io.opentelemetry.instrumentation.awssdk.v1_11; -+ -+import static io.opentelemetry.api.common.AttributeKey.stringKey; -+import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo; -+import static java.util.Collections.singletonList; -+ -+import com.amazonaws.services.secretsmanager.AWSSecretsManager; -+import com.amazonaws.services.secretsmanager.AWSSecretsManagerClientBuilder; -+import com.amazonaws.services.secretsmanager.model.CreateSecretRequest; -+import io.opentelemetry.testing.internal.armeria.common.HttpResponse; -+import io.opentelemetry.testing.internal.armeria.common.HttpStatus; -+import io.opentelemetry.testing.internal.armeria.common.MediaType; -+import org.junit.jupiter.api.Test; -+ -+public abstract class AbstractSecretsManagerClientTest extends AbstractBaseAwsClientTest { -+ -+ public abstract AWSSecretsManagerClientBuilder configureClient( -+ AWSSecretsManagerClientBuilder client); -+ -+ @Override -+ protected boolean hasRequestId() { -+ return true; -+ } -+ -+ @Test -+ public void sendRequestWithMockedResponse() throws Exception { -+ AWSSecretsManagerClientBuilder clientBuilder = AWSSecretsManagerClientBuilder.standard(); -+ AWSSecretsManager client = -+ configureClient(clientBuilder) -+ .withEndpointConfiguration(endpoint) -+ .withCredentials(credentialsProvider) -+ .build(); -+ -+ String body = -+ "{" -+ + "\"ARN\": \"arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3\"," -+ + "\"Name\": \"MyTestDatabaseSecret\"," -+ + "\"VersionId\": \"EXAMPLE1-90ab-cdef-fedc-ba987SECRET1\"" -+ + "}"; -+ server.enqueue(HttpResponse.of(HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, body)); -+ -+ Object response = -+ client.createSecret( -+ new CreateSecretRequest().withName("secretName").withSecretString("secretValue")); -+ -+ assertRequestWithMockedResponse( -+ response, -+ client, -+ "AWSSecretsManager", -+ "CreateSecret", -+ "POST", -+ singletonList( -+ equalTo( -+ stringKey("aws.secretsmanager.secret.arn"), 
-+ "arn:aws:secretsmanager:us-west-2:123456789012:secret:MyTestDatabaseSecret-a1b2c3"))); -+ } -+} -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractSnsClientTest.java b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractSnsClientTest.java -index 3f272ba477..bea20f3d86 100644 ---- a/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractSnsClientTest.java -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractSnsClientTest.java -@@ -5,8 +5,10 @@ - - package io.opentelemetry.instrumentation.awssdk.v1_11; - -+import static io.opentelemetry.api.common.AttributeKey.stringKey; - import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo; - import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_DESTINATION_NAME; -+import static java.util.Arrays.asList; - import static java.util.Collections.singletonList; - - import com.amazonaws.services.sns.AmazonSNS; -@@ -17,11 +19,7 @@ import io.opentelemetry.testing.internal.armeria.common.HttpResponse; - import io.opentelemetry.testing.internal.armeria.common.HttpStatus; - import io.opentelemetry.testing.internal.armeria.common.MediaType; - import java.util.List; --import java.util.function.Function; --import java.util.stream.Stream; --import org.junit.jupiter.params.ParameterizedTest; --import org.junit.jupiter.params.provider.Arguments; --import org.junit.jupiter.params.provider.MethodSource; -+import org.junit.jupiter.api.Test; - - public abstract class AbstractSnsClientTest extends AbstractBaseAwsClientTest { - -@@ -32,9 +30,8 @@ public abstract class AbstractSnsClientTest extends AbstractBaseAwsClientTest { - return true; - } - -- @ParameterizedTest -- @MethodSource("provideArguments") -- public void 
testSendRequestWithMockedResponse(Function call) throws Exception { -+ @Test -+ public void testSendRequestWithwithTopicArnMockedResponse() throws Exception { - AmazonSNSClientBuilder clientBuilder = AmazonSNSClientBuilder.standard(); - AmazonSNS client = - configureClient(clientBuilder) -@@ -55,24 +52,44 @@ public abstract class AbstractSnsClientTest extends AbstractBaseAwsClientTest { - server.enqueue(HttpResponse.of(HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, body)); - - List additionalAttributes = -- singletonList(equalTo(MESSAGING_DESTINATION_NAME, "somearn")); -+ asList( -+ equalTo(stringKey(MESSAGING_DESTINATION_NAME.getKey()), "somearn"), -+ equalTo(stringKey("aws.sns.topic.arn"), "somearn")); -+ -+ Object response = -+ client.publish(new PublishRequest().withMessage("somemessage").withTopicArn("somearn")); - -- Object response = call.apply(client); - assertRequestWithMockedResponse( - response, client, "SNS", "Publish", "POST", additionalAttributes); - } - -- private static Stream provideArguments() { -- return Stream.of( -- Arguments.of( -- (Function) -- c -> -- c.publish( -- new PublishRequest().withMessage("somemessage").withTopicArn("somearn"))), -- Arguments.of( -- (Function) -- c -> -- c.publish( -- new PublishRequest().withMessage("somemessage").withTargetArn("somearn")))); -+ @Test -+ public void testSendRequestWithwithTargetArnMockedResponse() throws Exception { -+ AmazonSNSClientBuilder clientBuilder = AmazonSNSClientBuilder.standard(); -+ AmazonSNS client = -+ configureClient(clientBuilder) -+ .withEndpointConfiguration(endpoint) -+ .withCredentials(credentialsProvider) -+ .build(); -+ -+ String body = -+ "" -+ + " " -+ + " 567910cd-659e-55d4-8ccb-5aaf14679dc0" -+ + " " -+ + " " -+ + " d74b8436-ae13-5ab4-a9ff-ce54dfea72a0" -+ + " " -+ + ""; -+ -+ server.enqueue(HttpResponse.of(HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, body)); -+ -+ List additionalAttributes = -+ singletonList(equalTo(stringKey(MESSAGING_DESTINATION_NAME.getKey()), "somearn")); 
-+ -+ Object response = -+ client.publish(new PublishRequest().withMessage("somemessage").withTargetArn("somearn")); -+ assertRequestWithMockedResponse( -+ response, client, "SNS", "Publish", "POST", additionalAttributes); - } - } -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractSqsSuppressReceiveSpansTest.java b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractSqsSuppressReceiveSpansTest.java -index c0b4b13a17..4cfaf469d9 100644 ---- a/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractSqsSuppressReceiveSpansTest.java -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractSqsSuppressReceiveSpansTest.java -@@ -116,7 +116,8 @@ public abstract class AbstractSqsSuppressReceiveSpansTest { - equalTo(URL_FULL, "http://localhost:" + sqsPort), - equalTo(SERVER_ADDRESS, "localhost"), - equalTo(SERVER_PORT, sqsPort), -- equalTo(NETWORK_PROTOCOL_VERSION, "1.1"))), -+ equalTo(NETWORK_PROTOCOL_VERSION, "1.1"), -+ equalTo(stringKey("aws.auth.account.access_key"), "x"))), - trace -> - trace.hasSpansSatisfyingExactly( - span -> -@@ -146,7 +147,8 @@ public abstract class AbstractSqsSuppressReceiveSpansTest { - equalTo(MESSAGING_OPERATION, "publish"), - satisfies( - MESSAGING_MESSAGE_ID, val -> val.isInstanceOf(String.class)), -- equalTo(NETWORK_PROTOCOL_VERSION, "1.1")), -+ equalTo(NETWORK_PROTOCOL_VERSION, "1.1"), -+ equalTo(stringKey("aws.auth.account.access_key"), "x")), - span -> - span.hasName("testSdkSqs process") - .hasKind(SpanKind.CONSUMER) -@@ -174,7 +176,8 @@ public abstract class AbstractSqsSuppressReceiveSpansTest { - equalTo(MESSAGING_OPERATION, "process"), - satisfies( - MESSAGING_MESSAGE_ID, val -> val.isInstanceOf(String.class)), -- equalTo(NETWORK_PROTOCOL_VERSION, "1.1")), -+ 
equalTo(NETWORK_PROTOCOL_VERSION, "1.1"), -+ equalTo(stringKey("aws.auth.account.access_key"), "x")), - span -> - span.hasName("process child") - .hasParent(trace.getSpan(1)) -@@ -222,7 +225,8 @@ public abstract class AbstractSqsSuppressReceiveSpansTest { - equalTo(URL_FULL, "http://localhost:" + sqsPort), - equalTo(SERVER_ADDRESS, "localhost"), - equalTo(SERVER_PORT, sqsPort), -- equalTo(NETWORK_PROTOCOL_VERSION, "1.1"))), -+ equalTo(NETWORK_PROTOCOL_VERSION, "1.1"), -+ equalTo(stringKey("aws.auth.account.access_key"), "x"))), - trace -> - trace.hasSpansSatisfyingExactly( - span -> -@@ -252,7 +256,8 @@ public abstract class AbstractSqsSuppressReceiveSpansTest { - equalTo(MESSAGING_OPERATION, "publish"), - satisfies( - MESSAGING_MESSAGE_ID, val -> val.isInstanceOf(String.class)), -- equalTo(NETWORK_PROTOCOL_VERSION, "1.1")), -+ equalTo(NETWORK_PROTOCOL_VERSION, "1.1"), -+ equalTo(stringKey("aws.auth.account.access_key"), "x")), - span -> - span.hasName("testSdkSqs process") - .hasKind(SpanKind.CONSUMER) -@@ -280,7 +285,8 @@ public abstract class AbstractSqsSuppressReceiveSpansTest { - equalTo(MESSAGING_OPERATION, "process"), - satisfies( - MESSAGING_MESSAGE_ID, val -> val.isInstanceOf(String.class)), -- equalTo(NETWORK_PROTOCOL_VERSION, "1.1")), -+ equalTo(NETWORK_PROTOCOL_VERSION, "1.1"), -+ equalTo(stringKey("aws.auth.account.access_key"), "x")), - span -> - span.hasName("process child") - .hasParent(trace.getSpan(1)) -@@ -311,7 +317,8 @@ public abstract class AbstractSqsSuppressReceiveSpansTest { - equalTo(URL_FULL, "http://localhost:" + sqsPort), - equalTo(SERVER_ADDRESS, "localhost"), - equalTo(SERVER_PORT, sqsPort), -- equalTo(NETWORK_PROTOCOL_VERSION, "1.1")))); -+ equalTo(NETWORK_PROTOCOL_VERSION, "1.1"), -+ equalTo(stringKey("aws.auth.account.access_key"), "x")))); - } - - @Test -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractSqsTracingTest.java 
b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractSqsTracingTest.java -index f1bfa126ca..dfb5b96550 100644 ---- a/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractSqsTracingTest.java -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractSqsTracingTest.java -@@ -150,7 +150,8 @@ public abstract class AbstractSqsTracingTest { - equalTo(URL_FULL, "http://localhost:" + sqsPort), - equalTo(SERVER_ADDRESS, "localhost"), - equalTo(SERVER_PORT, sqsPort), -- equalTo(NETWORK_PROTOCOL_VERSION, "1.1"))), -+ equalTo(NETWORK_PROTOCOL_VERSION, "1.1"), -+ equalTo(stringKey("aws.auth.account.access_key"), "x"))), - trace -> - trace.hasSpansSatisfyingExactly( - span -> { -@@ -179,7 +180,8 @@ public abstract class AbstractSqsTracingTest { - equalTo(MESSAGING_OPERATION, "publish"), - satisfies( - MESSAGING_MESSAGE_ID, val -> val.isInstanceOf(String.class)), -- equalTo(NETWORK_PROTOCOL_VERSION, "1.1"))); -+ equalTo(NETWORK_PROTOCOL_VERSION, "1.1"), -+ equalTo(stringKey("aws.auth.account.access_key"), "x"))); - - if (testCaptureHeaders) { - attributes.add( -@@ -220,7 +222,8 @@ public abstract class AbstractSqsTracingTest { - equalTo(MESSAGING_DESTINATION_NAME, "testSdkSqs"), - equalTo(MESSAGING_OPERATION, "receive"), - equalTo(MESSAGING_BATCH_MESSAGE_COUNT, 1), -- equalTo(NETWORK_PROTOCOL_VERSION, "1.1"))); -+ equalTo(NETWORK_PROTOCOL_VERSION, "1.1"), -+ equalTo(stringKey("aws.auth.account.access_key"), "x"))); - - if (testCaptureHeaders) { - attributes.add( -@@ -260,7 +263,8 @@ public abstract class AbstractSqsTracingTest { - equalTo(MESSAGING_OPERATION, "process"), - satisfies( - MESSAGING_MESSAGE_ID, val -> val.isInstanceOf(String.class)), -- equalTo(NETWORK_PROTOCOL_VERSION, "1.1"))); -+ equalTo(NETWORK_PROTOCOL_VERSION, "1.1"), -+ equalTo(stringKey("aws.auth.account.access_key"), "x"))); - - 
if (testCaptureHeaders) { - attributes.add( -@@ -320,7 +324,8 @@ public abstract class AbstractSqsTracingTest { - equalTo(URL_FULL, "http://localhost:" + sqsPort), - equalTo(SERVER_ADDRESS, "localhost"), - equalTo(SERVER_PORT, sqsPort), -- equalTo(NETWORK_PROTOCOL_VERSION, "1.1"))), -+ equalTo(NETWORK_PROTOCOL_VERSION, "1.1"), -+ equalTo(stringKey("aws.auth.account.access_key"), "x"))), - trace -> - trace.hasSpansSatisfyingExactly( - span -> -@@ -350,7 +355,8 @@ public abstract class AbstractSqsTracingTest { - equalTo(MESSAGING_OPERATION, "publish"), - satisfies( - MESSAGING_MESSAGE_ID, val -> val.isInstanceOf(String.class)), -- equalTo(NETWORK_PROTOCOL_VERSION, "1.1"))), -+ equalTo(NETWORK_PROTOCOL_VERSION, "1.1"), -+ equalTo(stringKey("aws.auth.account.access_key"), "x"))), - trace -> { - AtomicReference receiveSpan = new AtomicReference<>(); - AtomicReference processSpan = new AtomicReference<>(); -@@ -385,7 +391,8 @@ public abstract class AbstractSqsTracingTest { - equalTo(URL_FULL, "http://localhost:" + sqsPort), - equalTo(SERVER_ADDRESS, "localhost"), - equalTo(SERVER_PORT, sqsPort), -- equalTo(NETWORK_PROTOCOL_VERSION, "1.1")), -+ equalTo(NETWORK_PROTOCOL_VERSION, "1.1"), -+ equalTo(stringKey("aws.auth.account.access_key"), "x")), - span -> - span.hasName("testSdkSqs receive") - .hasKind(SpanKind.CONSUMER) -@@ -419,7 +426,8 @@ public abstract class AbstractSqsTracingTest { - MessagingIncubatingAttributes - .MESSAGING_BATCH_MESSAGE_COUNT, - 1), -- equalTo(NETWORK_PROTOCOL_VERSION, "1.1")), -+ equalTo(NETWORK_PROTOCOL_VERSION, "1.1"), -+ equalTo(stringKey("aws.auth.account.access_key"), "x")), - span -> - span.hasName("testSdkSqs process") - .hasKind(SpanKind.CONSUMER) -@@ -452,7 +460,8 @@ public abstract class AbstractSqsTracingTest { - satisfies( - MESSAGING_MESSAGE_ID, - val -> val.isInstanceOf(String.class)), -- equalTo(NETWORK_PROTOCOL_VERSION, "1.1")), -+ equalTo(NETWORK_PROTOCOL_VERSION, "1.1"), -+ equalTo(stringKey("aws.auth.account.access_key"), 
"x")), - span -> - span.hasName("process child") - .hasParent(processSpan.get()) -diff --git a/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractStepFunctionsClientTest.java b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractStepFunctionsClientTest.java -new file mode 100644 -index 0000000000..fc58ec3c9b ---- /dev/null -+++ b/instrumentation/aws-sdk/aws-sdk-1.11/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v1_11/AbstractStepFunctionsClientTest.java -@@ -0,0 +1,78 @@ -+/* -+ * Copyright The OpenTelemetry Authors -+ * SPDX-License-Identifier: Apache-2.0 -+ */ -+ -+package io.opentelemetry.instrumentation.awssdk.v1_11; -+ -+import static io.opentelemetry.api.common.AttributeKey.stringKey; -+import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo; -+import static java.util.Collections.singletonList; -+ -+import com.amazonaws.services.stepfunctions.AWSStepFunctions; -+import com.amazonaws.services.stepfunctions.AWSStepFunctionsClientBuilder; -+import com.amazonaws.services.stepfunctions.model.DescribeActivityRequest; -+import com.amazonaws.services.stepfunctions.model.DescribeStateMachineRequest; -+import io.opentelemetry.sdk.testing.assertj.AttributeAssertion; -+import io.opentelemetry.testing.internal.armeria.common.HttpResponse; -+import io.opentelemetry.testing.internal.armeria.common.HttpStatus; -+import io.opentelemetry.testing.internal.armeria.common.MediaType; -+import java.util.List; -+import java.util.function.Function; -+import java.util.stream.Stream; -+import org.junit.jupiter.params.ParameterizedTest; -+import org.junit.jupiter.params.provider.Arguments; -+import org.junit.jupiter.params.provider.MethodSource; -+ -+public abstract class AbstractStepFunctionsClientTest extends AbstractBaseAwsClientTest { -+ -+ public abstract AWSStepFunctionsClientBuilder configureClient( -+ 
AWSStepFunctionsClientBuilder client); -+ -+ @Override -+ protected boolean hasRequestId() { -+ return false; -+ } -+ -+ @ParameterizedTest -+ @MethodSource("provideArguments") -+ public void testSendRequestWithMockedResponse( -+ String operation, -+ List additionalAttributes, -+ Function call) -+ throws Exception { -+ -+ AWSStepFunctionsClientBuilder clientBuilder = AWSStepFunctionsClientBuilder.standard(); -+ -+ AWSStepFunctions client = -+ configureClient(clientBuilder) -+ .withEndpointConfiguration(endpoint) -+ .withCredentials(credentialsProvider) -+ .build(); -+ -+ server.enqueue(HttpResponse.of(HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, "")); -+ -+ Object response = call.apply(client); -+ assertRequestWithMockedResponse( -+ response, client, "AWSStepFunctions", operation, "POST", additionalAttributes); -+ } -+ -+ private static Stream provideArguments() { -+ return Stream.of( -+ Arguments.of( -+ "DescribeStateMachine", -+ singletonList( -+ equalTo(stringKey("aws.stepfunctions.state_machine.arn"), "stateMachineArn")), -+ (Function) -+ c -> -+ c.describeStateMachine( -+ new DescribeStateMachineRequest().withStateMachineArn("stateMachineArn"))), -+ Arguments.of( -+ "DescribeActivity", -+ singletonList(equalTo(stringKey("aws.stepfunctions.activity.arn"), "activityArn")), -+ (Function) -+ c -> -+ c.describeActivity( -+ new DescribeActivityRequest().withActivityArn("activityArn")))); -+ } -+} -diff --git a/instrumentation/aws-sdk/aws-sdk-2.2/javaagent/build.gradle.kts b/instrumentation/aws-sdk/aws-sdk-2.2/javaagent/build.gradle.kts -index 7d3fa5d03c..6079232826 100644 ---- a/instrumentation/aws-sdk/aws-sdk-2.2/javaagent/build.gradle.kts -+++ b/instrumentation/aws-sdk/aws-sdk-2.2/javaagent/build.gradle.kts -@@ -104,6 +104,9 @@ dependencies { - testLibrary("software.amazon.awssdk:sqs:2.2.0") - testLibrary("software.amazon.awssdk:sns:2.2.0") - testLibrary("software.amazon.awssdk:ses:2.2.0") -+ testLibrary("software.amazon.awssdk:sfn:2.2.0") -+ 
testLibrary("software.amazon.awssdk:secretsmanager:2.2.0") -+ testLibrary("software.amazon.awssdk:lambda:2.2.0") - } - - val latestDepTest = findProperty("testLatestDeps") as Boolean -diff --git a/instrumentation/aws-sdk/aws-sdk-2.2/library-autoconfigure/build.gradle.kts b/instrumentation/aws-sdk/aws-sdk-2.2/library-autoconfigure/build.gradle.kts -index d493f83a86..0bb91a17c3 100644 ---- a/instrumentation/aws-sdk/aws-sdk-2.2/library-autoconfigure/build.gradle.kts -+++ b/instrumentation/aws-sdk/aws-sdk-2.2/library-autoconfigure/build.gradle.kts -@@ -22,6 +22,9 @@ dependencies { - testLibrary("software.amazon.awssdk:s3:2.2.0") - testLibrary("software.amazon.awssdk:sqs:2.2.0") - testLibrary("software.amazon.awssdk:sns:2.2.0") -+ testLibrary("software.amazon.awssdk:sfn:2.2.0") -+ testLibrary("software.amazon.awssdk:secretsmanager:2.2.0") -+ testLibrary("software.amazon.awssdk:lambda:2.2.0") - } - - tasks { -diff --git a/instrumentation/aws-sdk/aws-sdk-2.2/library/build.gradle.kts b/instrumentation/aws-sdk/aws-sdk-2.2/library/build.gradle.kts -index 3b7381a8ba..6f77951710 100644 ---- a/instrumentation/aws-sdk/aws-sdk-2.2/library/build.gradle.kts -+++ b/instrumentation/aws-sdk/aws-sdk-2.2/library/build.gradle.kts -@@ -22,6 +22,9 @@ dependencies { - testLibrary("software.amazon.awssdk:rds:2.2.0") - testLibrary("software.amazon.awssdk:s3:2.2.0") - testLibrary("software.amazon.awssdk:ses:2.2.0") -+ testLibrary("software.amazon.awssdk:sfn:2.2.0") -+ testLibrary("software.amazon.awssdk:secretsmanager:2.2.0") -+ testLibrary("software.amazon.awssdk:lambda:2.2.0") - } - - testing { -diff --git a/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/AwsExperimentalAttributes.java b/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/AwsExperimentalAttributes.java -new file mode 100644 -index 0000000000..fd951ffe37 ---- /dev/null -+++ 
b/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/AwsExperimentalAttributes.java -@@ -0,0 +1,80 @@ -+/* -+ * Copyright The OpenTelemetry Authors -+ * SPDX-License-Identifier: Apache-2.0 -+ */ -+ -+package io.opentelemetry.instrumentation.awssdk.v2_2.internal; -+ -+import static io.opentelemetry.api.common.AttributeKey.stringKey; -+ -+import io.opentelemetry.api.common.AttributeKey; -+ -+final class AwsExperimentalAttributes { -+ static final AttributeKey AWS_BUCKET_NAME = stringKey("aws.bucket.name"); -+ static final AttributeKey AWS_QUEUE_URL = stringKey("aws.queue.url"); -+ static final AttributeKey AWS_QUEUE_NAME = stringKey("aws.queue.name"); -+ static final AttributeKey AWS_STREAM_NAME = stringKey("aws.stream.name"); -+ static final AttributeKey AWS_STREAM_ARN = stringKey("aws.stream.arn"); -+ static final AttributeKey AWS_TABLE_NAME = stringKey("aws.table.name"); -+ static final AttributeKey AWS_GUARDRAIL_ID = stringKey("aws.bedrock.guardrail.id"); -+ static final AttributeKey AWS_GUARDRAIL_ARN = stringKey("aws.bedrock.guardrail.arn"); -+ static final AttributeKey AWS_AGENT_ID = stringKey("aws.bedrock.agent.id"); -+ static final AttributeKey AWS_DATA_SOURCE_ID = stringKey("aws.bedrock.data_source.id"); -+ static final AttributeKey AWS_KNOWLEDGE_BASE_ID = -+ stringKey("aws.bedrock.knowledge_base.id"); -+ -+ // TODO: Merge in gen_ai attributes in opentelemetry-semconv-incubating once upgrade to v1.26.0 -+ static final AttributeKey GEN_AI_MODEL = stringKey("gen_ai.request.model"); -+ static final AttributeKey GEN_AI_SYSTEM = stringKey("gen_ai.system"); -+ -+ static final AttributeKey GEN_AI_REQUEST_MAX_TOKENS = -+ stringKey("gen_ai.request.max_tokens"); -+ -+ static final AttributeKey GEN_AI_REQUEST_TEMPERATURE = -+ stringKey("gen_ai.request.temperature"); -+ -+ static final AttributeKey GEN_AI_REQUEST_TOP_P = stringKey("gen_ai.request.top_p"); -+ -+ static final AttributeKey 
GEN_AI_RESPONSE_FINISH_REASONS = -+ stringKey("gen_ai.response.finish_reasons"); -+ -+ static final AttributeKey GEN_AI_USAGE_INPUT_TOKENS = -+ stringKey("gen_ai.usage.input_tokens"); -+ -+ static final AttributeKey GEN_AI_USAGE_OUTPUT_TOKENS = -+ stringKey("gen_ai.usage.output_tokens"); -+ -+ static final AttributeKey AWS_STATE_MACHINE_ARN = -+ stringKey("aws.stepfunctions.state_machine.arn"); -+ -+ static final AttributeKey AWS_STEP_FUNCTIONS_ACTIVITY_ARN = -+ stringKey("aws.stepfunctions.activity.arn"); -+ -+ static final AttributeKey AWS_SNS_TOPIC_ARN = stringKey("aws.sns.topic.arn"); -+ -+ static final AttributeKey AWS_SECRET_ARN = stringKey("aws.secretsmanager.secret.arn"); -+ -+ static final AttributeKey AWS_LAMBDA_NAME = stringKey("aws.lambda.function.name"); -+ -+ static final AttributeKey AWS_LAMBDA_ARN = stringKey("aws.lambda.function.arn"); -+ -+ static final AttributeKey AWS_LAMBDA_RESOURCE_ID = -+ stringKey("aws.lambda.resource_mapping.id"); -+ -+ static final AttributeKey AWS_TABLE_ARN = stringKey("aws.table.arn"); -+ -+ static final AttributeKey AWS_AUTH_ACCESS_KEY = stringKey("aws.auth.account.access_key"); -+ -+ static final AttributeKey AWS_AUTH_REGION = stringKey("aws.auth.region"); -+ -+ static boolean isGenAiAttribute(String attributeKey) { -+ return attributeKey.equals(GEN_AI_REQUEST_MAX_TOKENS.getKey()) -+ || attributeKey.equals(GEN_AI_REQUEST_TEMPERATURE.getKey()) -+ || attributeKey.equals(GEN_AI_REQUEST_TOP_P.getKey()) -+ || attributeKey.equals(GEN_AI_RESPONSE_FINISH_REASONS.getKey()) -+ || attributeKey.equals(GEN_AI_USAGE_INPUT_TOKENS.getKey()) -+ || attributeKey.equals(GEN_AI_USAGE_OUTPUT_TOKENS.getKey()); -+ } -+ -+ private AwsExperimentalAttributes() {} -+} -diff --git a/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/AwsSdkRequest.java b/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/AwsSdkRequest.java 
-index 02d92ca070..aa98cd62c7 100644 ---- a/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/AwsSdkRequest.java -+++ b/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/AwsSdkRequest.java -@@ -5,11 +5,20 @@ - - package io.opentelemetry.instrumentation.awssdk.v2_2.internal; - -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsSdkRequestType.BEDROCK; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsSdkRequestType.BEDROCKAGENTOPERATION; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsSdkRequestType.BEDROCKAGENTRUNTIMEOPERATION; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsSdkRequestType.BEDROCKDATASOURCEOPERATION; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsSdkRequestType.BEDROCKKNOWLEDGEBASEOPERATION; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsSdkRequestType.BEDROCKRUNTIME; - import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsSdkRequestType.DYNAMODB; - import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsSdkRequestType.KINESIS; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsSdkRequestType.LAMBDA; - import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsSdkRequestType.S3; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsSdkRequestType.SECRETSMANAGER; - import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsSdkRequestType.SNS; - import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsSdkRequestType.SQS; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsSdkRequestType.STEPFUNCTION; - import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.FieldMapping.request; - import static 
io.opentelemetry.instrumentation.awssdk.v2_2.internal.FieldMapping.response; - -@@ -34,6 +43,52 @@ enum AwsSdkRequest { - SnsRequest(SNS, "SnsRequest"), - SqsRequest(SQS, "SqsRequest"), - KinesisRequest(KINESIS, "KinesisRequest"), -+ -+ BedrockRequest(BEDROCK, "BedrockRequest"), -+ BedrockAgentRuntimeRequest(BEDROCKAGENTRUNTIMEOPERATION, "BedrockAgentRuntimeRequest"), -+ BedrockRuntimeRequest(BEDROCKRUNTIME, "BedrockRuntimeRequest"), -+ // BedrockAgent API based requests. We only support operations that are related to -+ // Agent/DataSources/KnowledgeBases -+ // resources and the request/response context contains the resource ID. -+ BedrockCreateAgentActionGroupRequest(BEDROCKAGENTOPERATION, "CreateAgentActionGroupRequest"), -+ BedrockCreateAgentAliasRequest(BEDROCKAGENTOPERATION, "CreateAgentAliasRequest"), -+ BedrockDeleteAgentActionGroupRequest(BEDROCKAGENTOPERATION, "DeleteAgentActionGroupRequest"), -+ BedrockDeleteAgentAliasRequest(BEDROCKAGENTOPERATION, "DeleteAgentAliasRequest"), -+ BedrockDeleteAgentVersionRequest(BEDROCKAGENTOPERATION, "DeleteAgentVersionRequest"), -+ BedrockGetAgentActionGroupRequest(BEDROCKAGENTOPERATION, "GetAgentActionGroupRequest"), -+ BedrockGetAgentAliasRequest(BEDROCKAGENTOPERATION, "GetAgentAliasRequest"), -+ BedrockGetAgentRequest(BEDROCKAGENTOPERATION, "GetAgentRequest"), -+ BedrockGetAgentVersionRequest(BEDROCKAGENTOPERATION, "GetAgentVersionRequest"), -+ BedrockListAgentActionGroupsRequest(BEDROCKAGENTOPERATION, "ListAgentActionGroupsRequest"), -+ BedrockListAgentAliasesRequest(BEDROCKAGENTOPERATION, "ListAgentAliasesRequest"), -+ BedrockListAgentKnowledgeBasesRequest(BEDROCKAGENTOPERATION, "ListAgentKnowledgeBasesRequest"), -+ BedrocListAgentVersionsRequest(BEDROCKAGENTOPERATION, "ListAgentVersionsRequest"), -+ BedrockPrepareAgentRequest(BEDROCKAGENTOPERATION, "PrepareAgentRequest"), -+ BedrockUpdateAgentActionGroupRequest(BEDROCKAGENTOPERATION, "UpdateAgentActionGroupRequest"), -+ 
BedrockUpdateAgentAliasRequest(BEDROCKAGENTOPERATION, "UpdateAgentAliasRequest"), -+ BedrockUpdateAgentRequest(BEDROCKAGENTOPERATION, "UpdateAgentRequest"), -+ BedrockBedrockAgentRequest(BEDROCKAGENTOPERATION, "BedrockAgentRequest"), -+ BedrockDeleteDataSourceRequest(BEDROCKDATASOURCEOPERATION, "DeleteDataSourceRequest"), -+ BedrockGetDataSourceRequest(BEDROCKDATASOURCEOPERATION, "GetDataSourceRequest"), -+ BedrockUpdateDataSourceRequest(BEDROCKDATASOURCEOPERATION, "UpdateDataSourceRequest"), -+ BedrocAssociateAgentKnowledgeBaseRequest( -+ BEDROCKKNOWLEDGEBASEOPERATION, "AssociateAgentKnowledgeBaseRequest"), -+ BedrockCreateDataSourceRequest(BEDROCKKNOWLEDGEBASEOPERATION, "CreateDataSourceRequest"), -+ BedrockDeleteKnowledgeBaseRequest(BEDROCKKNOWLEDGEBASEOPERATION, "DeleteKnowledgeBaseRequest"), -+ BedrockDisassociateAgentKnowledgeBaseRequest( -+ BEDROCKKNOWLEDGEBASEOPERATION, "DisassociateAgentKnowledgeBaseRequest"), -+ BedrockGetAgentKnowledgeBaseRequest( -+ BEDROCKKNOWLEDGEBASEOPERATION, "GetAgentKnowledgeBaseRequest"), -+ BedrockGetKnowledgeBaseRequest(BEDROCKKNOWLEDGEBASEOPERATION, "GetKnowledgeBaseRequest"), -+ BedrockListDataSourcesRequest(BEDROCKKNOWLEDGEBASEOPERATION, "ListDataSourcesRequest"), -+ BedrockUpdateAgentKnowledgeBaseRequest( -+ BEDROCKKNOWLEDGEBASEOPERATION, "UpdateAgentKnowledgeBaseRequest"), -+ -+ SfnRequest(STEPFUNCTION, "SfnRequest"), -+ -+ SecretsManagerRequest(SECRETSMANAGER, "SecretsManagerRequest"), -+ -+ LambdaRequest(LAMBDA, "LambdaRequest"), - // specific requests - BatchGetItem( - DYNAMODB, -diff --git a/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/AwsSdkRequestType.java b/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/AwsSdkRequestType.java -index 274ec27194..d8dba6cf5c 100644 ---- 
a/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/AwsSdkRequestType.java -+++ b/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/AwsSdkRequestType.java -@@ -5,7 +5,34 @@ - - package io.opentelemetry.instrumentation.awssdk.v2_2.internal; - -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_AGENT_ID; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_BUCKET_NAME; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_DATA_SOURCE_ID; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_GUARDRAIL_ARN; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_GUARDRAIL_ID; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_KNOWLEDGE_BASE_ID; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_LAMBDA_ARN; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_LAMBDA_NAME; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_LAMBDA_RESOURCE_ID; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_QUEUE_NAME; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_QUEUE_URL; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_SECRET_ARN; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_SNS_TOPIC_ARN; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_STATE_MACHINE_ARN; -+import static 
io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_STEP_FUNCTIONS_ACTIVITY_ARN; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_STREAM_ARN; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_STREAM_NAME; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_TABLE_ARN; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_TABLE_NAME; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.GEN_AI_MODEL; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.GEN_AI_REQUEST_MAX_TOKENS; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.GEN_AI_REQUEST_TEMPERATURE; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.GEN_AI_REQUEST_TOP_P; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.GEN_AI_RESPONSE_FINISH_REASONS; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.GEN_AI_USAGE_INPUT_TOKENS; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.GEN_AI_USAGE_OUTPUT_TOKENS; - import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.FieldMapping.request; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.FieldMapping.response; - - import io.opentelemetry.api.common.AttributeKey; - import java.util.Collections; -@@ -13,16 +40,64 @@ import java.util.List; - import java.util.Map; - - enum AwsSdkRequestType { -- S3(request("aws.bucket.name", "Bucket")), -- SQS(request("aws.queue.url", "QueueUrl"), request("aws.queue.name", "QueueName")), -- KINESIS(request("aws.stream.name", "StreamName")), -- 
DYNAMODB(request("aws.table.name", "TableName")), -+ S3(request(AWS_BUCKET_NAME.getKey(), "Bucket")), -+ -+ SQS(request(AWS_QUEUE_URL.getKey(), "QueueUrl"), request(AWS_QUEUE_NAME.getKey(), "QueueName")), -+ -+ KINESIS( -+ request(AWS_STREAM_NAME.getKey(), "StreamName"), -+ request(AWS_STREAM_ARN.getKey(), "StreamARN")), -+ -+ DYNAMODB( -+ request(AWS_TABLE_NAME.getKey(), "TableName"), -+ response(AWS_TABLE_ARN.getKey(), "Table.TableArn")), -+ - SNS( - /* - * Only one of TopicArn and TargetArn are permitted on an SNS request. - */ - request(AttributeKeys.MESSAGING_DESTINATION_NAME.getKey(), "TargetArn"), -- request(AttributeKeys.MESSAGING_DESTINATION_NAME.getKey(), "TopicArn")); -+ request(AttributeKeys.MESSAGING_DESTINATION_NAME.getKey(), "TopicArn"), -+ request(AWS_SNS_TOPIC_ARN.getKey(), "TopicArn")), -+ -+ BEDROCK( -+ request(AWS_GUARDRAIL_ID.getKey(), "guardrailIdentifier"), -+ response(AWS_GUARDRAIL_ARN.getKey(), "guardrailArn")), -+ BEDROCKAGENTOPERATION( -+ request(AWS_AGENT_ID.getKey(), "agentId"), response(AWS_AGENT_ID.getKey(), "agentId")), -+ BEDROCKAGENTRUNTIMEOPERATION( -+ request(AWS_AGENT_ID.getKey(), "agentId"), -+ response(AWS_AGENT_ID.getKey(), "agentId"), -+ request(AWS_KNOWLEDGE_BASE_ID.getKey(), "knowledgeBaseId"), -+ response(AWS_KNOWLEDGE_BASE_ID.getKey(), "knowledgeBaseId")), -+ BEDROCKDATASOURCEOPERATION( -+ request(AWS_DATA_SOURCE_ID.getKey(), "dataSourceId"), -+ response(AWS_DATA_SOURCE_ID.getKey(), "dataSourceId")), -+ BEDROCKKNOWLEDGEBASEOPERATION( -+ request(AWS_KNOWLEDGE_BASE_ID.getKey(), "knowledgeBaseId"), -+ response(AWS_KNOWLEDGE_BASE_ID.getKey(), "knowledgeBaseId")), -+ BEDROCKRUNTIME( -+ request(GEN_AI_MODEL.getKey(), "modelId"), -+ request(GEN_AI_REQUEST_MAX_TOKENS.getKey(), "body"), -+ request(GEN_AI_REQUEST_TEMPERATURE.getKey(), "body"), -+ request(GEN_AI_REQUEST_TOP_P.getKey(), "body"), -+ request(GEN_AI_USAGE_INPUT_TOKENS.getKey(), "body"), -+ response(GEN_AI_RESPONSE_FINISH_REASONS.getKey(), "body"), -+ 
response(GEN_AI_USAGE_INPUT_TOKENS.getKey(), "body"), -+ response(GEN_AI_USAGE_OUTPUT_TOKENS.getKey(), "body")), -+ -+ STEPFUNCTION( -+ request(AWS_STATE_MACHINE_ARN.getKey(), "stateMachineArn"), -+ request(AWS_STEP_FUNCTIONS_ACTIVITY_ARN.getKey(), "activityArn")), -+ -+ // SNS(request(AWS_SNS_TOPIC_ARN.getKey(), "TopicArn")), -+ -+ SECRETSMANAGER(response(AWS_SECRET_ARN.getKey(), "ARN")), -+ -+ LAMBDA( -+ request(AWS_LAMBDA_NAME.getKey(), "FunctionName"), -+ request(AWS_LAMBDA_RESOURCE_ID.getKey(), "UUID"), -+ response(AWS_LAMBDA_ARN.getKey(), "Configuration.FunctionArn")); - - // Wrapping in unmodifiableMap - @SuppressWarnings("ImmutableEnumChecker") -diff --git a/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/BedrockJsonParser.java b/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/BedrockJsonParser.java -new file mode 100644 -index 0000000000..9812f1afa5 ---- /dev/null -+++ b/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/BedrockJsonParser.java -@@ -0,0 +1,279 @@ -+/* -+ * Copyright The OpenTelemetry Authors -+ * SPDX-License-Identifier: Apache-2.0 -+ */ -+ -+package io.opentelemetry.instrumentation.awssdk.v2_2.internal; -+ -+import java.util.ArrayList; -+import java.util.HashMap; -+import java.util.List; -+import java.util.Map; -+ -+/** -+ * This class is internal and is hence not for public use. Its APIs are unstable and can change at -+ * any time. -+ */ -+public class BedrockJsonParser { -+ -+ // Prevent instantiation -+ private BedrockJsonParser() { -+ throw new UnsupportedOperationException("Utility class"); -+ } -+ -+ /** -+ * This class is internal and is hence not for public use. Its APIs are unstable and can change at -+ * any time. 
-+ */ -+ public static LlmJson parse(String jsonString) { -+ JsonParser parser = new JsonParser(jsonString); -+ Map jsonBody = parser.parse(); -+ return new LlmJson(jsonBody); -+ } -+ -+ static class JsonParser { -+ private final String json; -+ private int position; -+ -+ public JsonParser(String json) { -+ this.json = json.trim(); -+ this.position = 0; -+ } -+ -+ private void skipWhitespace() { -+ while (position < json.length() && Character.isWhitespace(json.charAt(position))) { -+ position++; -+ } -+ } -+ -+ private char currentChar() { -+ return json.charAt(position); -+ } -+ -+ private static boolean isHexDigit(char c) { -+ return (c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F'); -+ } -+ -+ private void expect(char c) { -+ skipWhitespace(); -+ if (currentChar() != c) { -+ throw new IllegalArgumentException( -+ "Expected '" + c + "' but found '" + currentChar() + "'"); -+ } -+ position++; -+ } -+ -+ private String readString() { -+ skipWhitespace(); -+ expect('"'); // Ensure the string starts with a quote -+ StringBuilder result = new StringBuilder(); -+ while (currentChar() != '"') { -+ // Handle escape sequences -+ if (currentChar() == '\\') { -+ position++; // Move past the backslash -+ if (position >= json.length()) { -+ throw new IllegalArgumentException("Unexpected end of input in string escape sequence"); -+ } -+ char escapeChar = currentChar(); -+ switch (escapeChar) { -+ case '"': -+ case '\\': -+ case '/': -+ result.append(escapeChar); -+ break; -+ case 'b': -+ result.append('\b'); -+ break; -+ case 'f': -+ result.append('\f'); -+ break; -+ case 'n': -+ result.append('\n'); -+ break; -+ case 'r': -+ result.append('\r'); -+ break; -+ case 't': -+ result.append('\t'); -+ break; -+ case 'u': // Unicode escape sequence -+ if (position + 4 >= json.length()) { -+ throw new IllegalArgumentException("Invalid unicode escape sequence in string"); -+ } -+ char[] hexChars = new char[4]; -+ for (int i = 0; i < 4; i++) { -+ position++; // 
Move to the next character -+ char hexChar = json.charAt(position); -+ if (!isHexDigit(hexChar)) { -+ throw new IllegalArgumentException( -+ "Invalid hexadecimal digit in unicode escape sequence"); -+ } -+ hexChars[i] = hexChar; -+ } -+ int unicodeValue = Integer.parseInt(new String(hexChars), 16); -+ result.append((char) unicodeValue); -+ break; -+ default: -+ throw new IllegalArgumentException("Invalid escape character: \\" + escapeChar); -+ } -+ position++; -+ } else { -+ result.append(currentChar()); -+ position++; -+ } -+ } -+ position++; // Skip closing quote -+ return result.toString(); -+ } -+ -+ private Object readValue() { -+ skipWhitespace(); -+ char c = currentChar(); -+ -+ if (c == '"') { -+ return readString(); -+ } else if (Character.isDigit(c)) { -+ return readScopedNumber(); -+ } else if (c == '{') { -+ return readObject(); // JSON Objects -+ } else if (c == '[') { -+ return readArray(); // JSON Arrays -+ } else if (json.startsWith("true", position)) { -+ position += 4; -+ return true; -+ } else if (json.startsWith("false", position)) { -+ position += 5; -+ return false; -+ } else if (json.startsWith("null", position)) { -+ position += 4; -+ return null; // JSON null -+ } else { -+ throw new IllegalArgumentException("Unexpected character: " + c); -+ } -+ } -+ -+ private Number readScopedNumber() { -+ int start = position; -+ -+ // Consume digits and the optional decimal point -+ while (position < json.length() -+ && (Character.isDigit(json.charAt(position)) || json.charAt(position) == '.')) { -+ position++; -+ } -+ -+ String number = json.substring(start, position); -+ -+ if (number.contains(".")) { -+ double value = Double.parseDouble(number); -+ if (value < 0.0 || value > 1.0) { -+ throw new IllegalArgumentException( -+ "Value out of bounds for Bedrock Floating Point Attribute: " + number); -+ } -+ return value; -+ } else { -+ return Integer.parseInt(number); -+ } -+ } -+ -+ private Map readObject() { -+ Map map = new HashMap<>(); -+ expect('{'); 
-+ skipWhitespace(); -+ while (currentChar() != '}') { -+ String key = readString(); -+ expect(':'); -+ Object value = readValue(); -+ map.put(key, value); -+ skipWhitespace(); -+ if (currentChar() == ',') { -+ position++; -+ } -+ } -+ position++; // Skip closing brace -+ return map; -+ } -+ -+ private List readArray() { -+ List list = new ArrayList<>(); -+ expect('['); -+ skipWhitespace(); -+ while (currentChar() != ']') { -+ list.add(readValue()); -+ skipWhitespace(); -+ if (currentChar() == ',') { -+ position++; -+ } -+ } -+ position++; -+ return list; -+ } -+ -+ public Map parse() { -+ return readObject(); -+ } -+ } -+ -+ // Resolves paths in a JSON structure -+ static class JsonPathResolver { -+ -+ // Private constructor to prevent instantiation -+ private JsonPathResolver() { -+ throw new UnsupportedOperationException("Utility class"); -+ } -+ -+ public static Object resolvePath(LlmJson llmJson, String... paths) { -+ for (String path : paths) { -+ Object value = resolvePath(llmJson.getJsonBody(), path); -+ if (value != null) { -+ return value; -+ } -+ } -+ return null; -+ } -+ -+ private static Object resolvePath(Map json, String path) { -+ String[] keys = path.split("/"); -+ Object current = json; -+ -+ for (String key : keys) { -+ if (key.isEmpty()) { -+ continue; -+ } -+ -+ if (current instanceof Map) { -+ current = ((Map) current).get(key); -+ } else if (current instanceof List) { -+ try { -+ int index = Integer.parseInt(key); -+ current = ((List) current).get(index); -+ } catch (NumberFormatException | IndexOutOfBoundsException e) { -+ return null; -+ } -+ } else { -+ return null; -+ } -+ -+ if (current == null) { -+ return null; -+ } -+ } -+ return current; -+ } -+ } -+ -+ /** -+ * This class is internal and is hence not for public use. Its APIs are unstable and can change at -+ * any time. 
-+ */ -+ public static class LlmJson { -+ private final Map jsonBody; -+ -+ public LlmJson(Map jsonBody) { -+ this.jsonBody = jsonBody; -+ } -+ -+ public Map getJsonBody() { -+ return jsonBody; -+ } -+ } -+} -diff --git a/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/FieldMapper.java b/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/FieldMapper.java -index 9e7aeacbce..9a38a753ca 100644 ---- a/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/FieldMapper.java -+++ b/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/FieldMapper.java -@@ -65,8 +65,13 @@ class FieldMapper { - for (int i = 1; i < path.size() && target != null; i++) { - target = next(target, path.get(i)); - } -+ String value; - if (target != null) { -- String value = serializer.serialize(target); -+ if (AwsExperimentalAttributes.isGenAiAttribute(fieldMapping.getAttribute())) { -+ value = serializer.serialize(fieldMapping.getAttribute(), target); -+ } else { -+ value = serializer.serialize(target); -+ } - if (!StringUtils.isEmpty(value)) { - span.setAttribute(fieldMapping.getAttribute(), value); - } -diff --git a/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/Serializer.java b/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/Serializer.java -index 7ae1590152..5b7a188914 100644 ---- a/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/Serializer.java -+++ b/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/Serializer.java -@@ -7,11 +7,14 @@ package io.opentelemetry.instrumentation.awssdk.v2_2.internal; - - 
import java.io.IOException; - import java.io.InputStream; -+import java.util.Arrays; - import java.util.Collection; - import java.util.Map; -+import java.util.Objects; - import java.util.Optional; - import java.util.stream.Collectors; - import javax.annotation.Nullable; -+import software.amazon.awssdk.core.SdkBytes; - import software.amazon.awssdk.core.SdkPojo; - import software.amazon.awssdk.http.ContentStreamProvider; - import software.amazon.awssdk.http.SdkHttpFullRequest; -@@ -41,6 +44,45 @@ class Serializer { - return target.toString(); - } - -+ @Nullable -+ String serialize(String attributeName, Object target) { -+ try { -+ // Extract JSON string from target if it is a Bedrock Runtime JSON blob -+ String jsonString; -+ if (target instanceof SdkBytes) { -+ jsonString = ((SdkBytes) target).asUtf8String(); -+ } else { -+ if (target != null) { -+ return target.toString(); -+ } -+ return null; -+ } -+ -+ // Parse the LLM JSON string into a Map -+ BedrockJsonParser.LlmJson llmJson = BedrockJsonParser.parse(jsonString); -+ -+ // Use attribute name to extract the corresponding value -+ switch (attributeName) { -+ case "gen_ai.request.max_tokens": -+ return getMaxTokens(llmJson); -+ case "gen_ai.request.temperature": -+ return getTemperature(llmJson); -+ case "gen_ai.request.top_p": -+ return getTopP(llmJson); -+ case "gen_ai.response.finish_reasons": -+ return getFinishReasons(llmJson); -+ case "gen_ai.usage.input_tokens": -+ return getInputTokens(llmJson); -+ case "gen_ai.usage.output_tokens": -+ return getOutputTokens(llmJson); -+ default: -+ return null; -+ } -+ } catch (RuntimeException e) { -+ return null; -+ } -+ } -+ - @Nullable - private static String serialize(SdkPojo sdkPojo) { - ProtocolMarshaller marshaller = -@@ -65,4 +107,167 @@ class Serializer { - String serialized = collection.stream().map(this::serialize).collect(Collectors.joining(",")); - return (StringUtils.isEmpty(serialized) ? 
null : "[" + serialized + "]"); - } -+ -+ @Nullable -+ private static String approximateTokenCount( -+ BedrockJsonParser.LlmJson jsonBody, String... textPaths) { -+ return Arrays.stream(textPaths) -+ .map( -+ path -> { -+ Object value = BedrockJsonParser.JsonPathResolver.resolvePath(jsonBody, path); -+ if (value instanceof String) { -+ int tokenEstimate = (int) Math.ceil(((String) value).length() / 6.0); -+ return Integer.toString(tokenEstimate); -+ } -+ return null; -+ }) -+ .filter(Objects::nonNull) -+ .findFirst() -+ .orElse(null); -+ } -+ -+ // Model -> Path Mapping: -+ // Amazon Nova -> "/inferenceConfig/max_new_tokens" -+ // Amazon Titan -> "/textGenerationConfig/maxTokenCount" -+ // Anthropic Claude -> "/max_tokens" -+ // Cohere Command -> "/max_tokens" -+ // Cohere Command R -> "/max_tokens" -+ // AI21 Jamba -> "/max_tokens" -+ // Meta Llama -> "/max_gen_len" -+ // Mistral AI -> "/max_tokens" -+ @Nullable -+ private static String getMaxTokens(BedrockJsonParser.LlmJson jsonBody) { -+ Object value = -+ BedrockJsonParser.JsonPathResolver.resolvePath( -+ jsonBody, -+ "/max_tokens", -+ "/max_gen_len", -+ "/textGenerationConfig/maxTokenCount", -+ "inferenceConfig/max_new_tokens"); -+ return value != null ? String.valueOf(value) : null; -+ } -+ -+ // Model -> Path Mapping: -+ // Amazon Nova -> "/inferenceConfig/temperature" -+ // Amazon Titan -> "/textGenerationConfig/temperature" -+ // Anthropic Claude -> "/temperature" -+ // Cohere Command -> "/temperature" -+ // Cohere Command R -> "/temperature" -+ // AI21 Jamba -> "/temperature" -+ // Meta Llama -> "/temperature" -+ // Mistral AI -> "/temperature" -+ @Nullable -+ private static String getTemperature(BedrockJsonParser.LlmJson jsonBody) { -+ Object value = -+ BedrockJsonParser.JsonPathResolver.resolvePath( -+ jsonBody, -+ "/temperature", -+ "/textGenerationConfig/temperature", -+ "/inferenceConfig/temperature"); -+ return value != null ? 
String.valueOf(value) : null; -+ } -+ -+ // Model -> Path Mapping: -+ // Amazon Nova -> "/inferenceConfig/top_p" -+ // Amazon Titan -> "/textGenerationConfig/topP" -+ // Anthropic Claude -> "/top_p" -+ // Cohere Command -> "/p" -+ // Cohere Command R -> "/p" -+ // AI21 Jamba -> "/top_p" -+ // Meta Llama -> "/top_p" -+ // Mistral AI -> "/top_p" -+ @Nullable -+ private static String getTopP(BedrockJsonParser.LlmJson jsonBody) { -+ Object value = -+ BedrockJsonParser.JsonPathResolver.resolvePath( -+ jsonBody, "/top_p", "/p", "/textGenerationConfig/topP", "/inferenceConfig/top_p"); -+ return value != null ? String.valueOf(value) : null; -+ } -+ -+ // Model -> Path Mapping: -+ // Amazon Nova -> "/stopReason" -+ // Amazon Titan -> "/results/0/completionReason" -+ // Anthropic Claude -> "/stop_reason" -+ // Cohere Command -> "/generations/0/finish_reason" -+ // Cohere Command R -> "/finish_reason" -+ // AI21 Jamba -> "/choices/0/finish_reason" -+ // Meta Llama -> "/stop_reason" -+ // Mistral AI -> "/outputs/0/stop_reason" -+ @Nullable -+ private static String getFinishReasons(BedrockJsonParser.LlmJson jsonBody) { -+ Object value = -+ BedrockJsonParser.JsonPathResolver.resolvePath( -+ jsonBody, -+ "/stopReason", -+ "/finish_reason", -+ "/stop_reason", -+ "/results/0/completionReason", -+ "/generations/0/finish_reason", -+ "/choices/0/finish_reason", -+ "/outputs/0/stop_reason"); -+ -+ return value != null ? 
"[" + value + "]" : null; -+ } -+ -+ // Model -> Path Mapping: -+ // Amazon Nova -> "/usage/inputTokens" -+ // Amazon Titan -> "/inputTextTokenCount" -+ // Anthropic Claude -> "/usage/input_tokens" -+ // Cohere Command -> "/prompt" -+ // Cohere Command R -> "/message" -+ // AI21 Jamba -> "/usage/prompt_tokens" -+ // Meta Llama -> "/prompt_token_count" -+ // Mistral AI -> "/prompt" -+ @Nullable -+ private static String getInputTokens(BedrockJsonParser.LlmJson jsonBody) { -+ // Try direct tokens counts first -+ Object directCount = -+ BedrockJsonParser.JsonPathResolver.resolvePath( -+ jsonBody, -+ "/inputTextTokenCount", -+ "/prompt_token_count", -+ "/usage/input_tokens", -+ "/usage/prompt_tokens", -+ "/usage/inputTokens"); -+ -+ if (directCount != null) { -+ return String.valueOf(directCount); -+ } -+ -+ // Fall back to token approximation -+ Object approxTokenCount = approximateTokenCount(jsonBody, "/prompt", "/message"); -+ -+ return approxTokenCount != null ? String.valueOf(approxTokenCount) : null; -+ } -+ -+ // Model -> Path Mapping: -+ // Amazon Nova -> "/usage/outputTokens" -+ // Amazon Titan -> "/results/0/tokenCount" -+ // Anthropic Claude -> "/usage/output_tokens" -+ // Cohere Command -> "/generations/0/text" -+ // Cohere Command R -> "/text" -+ // AI21 Jamba -> "/usage/completion_tokens" -+ // Meta Llama -> "/generation_token_count" -+ // Mistral AI -> "/outputs/0/text" -+ @Nullable -+ private static String getOutputTokens(BedrockJsonParser.LlmJson jsonBody) { -+ // Try direct token counts first -+ Object directCount = -+ BedrockJsonParser.JsonPathResolver.resolvePath( -+ jsonBody, -+ "/generation_token_count", -+ "/results/0/tokenCount", -+ "/usage/output_tokens", -+ "/usage/completion_tokens", -+ "/usage/outputTokens"); -+ -+ if (directCount != null) { -+ return String.valueOf(directCount); -+ } -+ -+ // Fall back to token approximation -+ Object approxTokenCount = approximateTokenCount(jsonBody, "/text", "/outputs/0/text"); -+ -+ return 
approxTokenCount != null ? String.valueOf(approxTokenCount) : null; -+ } - } -diff --git a/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/TracingExecutionInterceptor.java b/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/TracingExecutionInterceptor.java -index 94243d0b11..06d8a9141b 100644 ---- a/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/TracingExecutionInterceptor.java -+++ b/instrumentation/aws-sdk/aws-sdk-2.2/library/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/internal/TracingExecutionInterceptor.java -@@ -5,6 +5,10 @@ - - package io.opentelemetry.instrumentation.awssdk.v2_2.internal; - -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_AUTH_ACCESS_KEY; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.AWS_AUTH_REGION; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsExperimentalAttributes.GEN_AI_SYSTEM; -+import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsSdkRequestType.BEDROCKRUNTIME; - import static io.opentelemetry.instrumentation.awssdk.v2_2.internal.AwsSdkRequestType.DYNAMODB; - - import io.opentelemetry.api.common.AttributeKey; -@@ -28,6 +32,7 @@ import java.time.Instant; - import java.util.Optional; - import java.util.stream.Collectors; - import javax.annotation.Nullable; -+import software.amazon.awssdk.auth.credentials.AwsCredentials; - import software.amazon.awssdk.auth.signer.AwsSignerExecutionAttribute; - import software.amazon.awssdk.awscore.AwsResponse; - import software.amazon.awssdk.core.ClientType; -@@ -40,6 +45,7 @@ import software.amazon.awssdk.core.interceptor.ExecutionInterceptor; - import software.amazon.awssdk.core.interceptor.SdkExecutionAttribute; - import 
software.amazon.awssdk.http.SdkHttpRequest; - import software.amazon.awssdk.http.SdkHttpResponse; -+import software.amazon.awssdk.regions.Region; - - /** - * AWS request execution interceptor. -@@ -48,6 +54,7 @@ import software.amazon.awssdk.http.SdkHttpResponse; - * at any time. - */ - public final class TracingExecutionInterceptor implements ExecutionInterceptor { -+ private static final String GEN_AI_SYSTEM_BEDROCK = "aws.bedrock"; - - // copied from DbIncubatingAttributes - private static final AttributeKey DB_OPERATION = AttributeKey.stringKey("db.operation"); -@@ -261,6 +268,26 @@ public final class TracingExecutionInterceptor implements ExecutionInterceptor { - SdkHttpRequest httpRequest = context.httpRequest(); - executionAttributes.putAttribute(SDK_HTTP_REQUEST_ATTRIBUTE, httpRequest); - -+ if (captureExperimentalSpanAttributes) { -+ AwsCredentials credentials = -+ executionAttributes.getAttribute(AwsSignerExecutionAttribute.AWS_CREDENTIALS); -+ Region signingRegion = -+ executionAttributes.getAttribute(AwsSignerExecutionAttribute.SIGNING_REGION); -+ Span span = Span.fromContext(otelContext); -+ -+ if (credentials != null) { -+ String accessKeyId = credentials.accessKeyId(); -+ if (accessKeyId != null) { -+ span.setAttribute(AWS_AUTH_ACCESS_KEY, accessKeyId); -+ } -+ } -+ -+ if (signingRegion != null) { -+ String region = signingRegion.toString(); -+ span.setAttribute(AWS_AUTH_REGION, region); -+ } -+ } -+ - // We ought to pass the parent of otelContext here, but we didn't store it, and it shouldn't - // make a difference (unless we start supporting the http.resend_count attribute in this - // instrumentation, which, logically, we can't on this level of abstraction) -@@ -342,6 +369,10 @@ public final class TracingExecutionInterceptor implements ExecutionInterceptor { - } - } - } -+ -+ if (awsSdkRequest.type() == BEDROCKRUNTIME) { -+ span.setAttribute(GEN_AI_SYSTEM, GEN_AI_SYSTEM_BEDROCK); -+ } - } - - @Override -diff --git 
a/instrumentation/aws-sdk/aws-sdk-2.2/library/src/test/groovy/io/opentelemetry/instrumentation/awssdk/v2_2/internal/BedrockJsonParserTest.groovy b/instrumentation/aws-sdk/aws-sdk-2.2/library/src/test/groovy/io/opentelemetry/instrumentation/awssdk/v2_2/internal/BedrockJsonParserTest.groovy -new file mode 100644 -index 0000000000..9dff7aa804 ---- /dev/null -+++ b/instrumentation/aws-sdk/aws-sdk-2.2/library/src/test/groovy/io/opentelemetry/instrumentation/awssdk/v2_2/internal/BedrockJsonParserTest.groovy -@@ -0,0 +1,107 @@ -+/* -+ * Copyright The OpenTelemetry Authors -+ * SPDX-License-Identifier: Apache-2.0 -+ */ -+ -+package io.opentelemetry.instrumentation.awssdk.v2_2.internal -+ -+import spock.lang.Specification -+ -+class BedrockJsonParserTest extends Specification { -+ def "should parse simple JSON object"() { -+ given: -+ String json = '{"key":"value"}' -+ -+ when: -+ def parsedJson = BedrockJsonParser.parse(json) -+ -+ then: -+ parsedJson.getJsonBody() == [key: "value"] -+ } -+ -+ def "should parse nested JSON object"() { -+ given: -+ String json = '{"parent":{"child":"value"}}' -+ -+ when: -+ def parsedJson = BedrockJsonParser.parse(json) -+ -+ then: -+ def parent = parsedJson.getJsonBody().get("parent") -+ parent instanceof Map -+ parent["child"] == "value" -+ } -+ -+ def "should parse JSON array"() { -+ given: -+ String json = '{"array":[1, "two", 1.0]}' -+ -+ when: -+ def parsedJson = BedrockJsonParser.parse(json) -+ -+ then: -+ def array = parsedJson.getJsonBody().get("array") -+ array instanceof List -+ array == [1, "two", 1.0] -+ } -+ -+ def "should parse escape sequences"() { -+ given: -+ String json = '{"escaped":"Line1\\nLine2\\tTabbed\\\"Quoted\\\"\\bBackspace\\fFormfeed\\rCarriageReturn\\\\Backslash\\/Slash\\u0041"}' -+ -+ when: -+ def parsedJson = BedrockJsonParser.parse(json) -+ -+ then: -+ parsedJson.getJsonBody().get("escaped") == -+ "Line1\nLine2\tTabbed\"Quoted\"\bBackspace\fFormfeed\rCarriageReturn\\Backslash/SlashA" -+ } -+ -+ def "should 
throw exception for malformed JSON"() { -+ given: -+ String malformedJson = '{"key":value}' -+ -+ when: -+ BedrockJsonParser.parse(malformedJson) -+ -+ then: -+ def ex = thrown(IllegalArgumentException) -+ ex.message.contains("Unexpected character") -+ } -+ -+ def "should resolve path in JSON object"() { -+ given: -+ String json = '{"parent":{"child":{"key":"value"}}}' -+ -+ when: -+ def parsedJson = BedrockJsonParser.parse(json) -+ def resolvedValue = BedrockJsonParser.JsonPathResolver.resolvePath(parsedJson, "/parent/child/key") -+ -+ then: -+ resolvedValue == "value" -+ } -+ -+ def "should resolve path in JSON array"() { -+ given: -+ String json = '{"array":[{"key":"value1"}, {"key":"value2"}]}' -+ -+ when: -+ def parsedJson = BedrockJsonParser.parse(json) -+ def resolvedValue = BedrockJsonParser.JsonPathResolver.resolvePath(parsedJson, "/array/1/key") -+ -+ then: -+ resolvedValue == "value2" -+ } -+ -+ def "should return null for invalid path resolution"() { -+ given: -+ String json = '{"parent":{"child":{"key":"value"}}}' -+ -+ when: -+ def parsedJson = BedrockJsonParser.parse(json) -+ def resolvedValue = BedrockJsonParser.JsonPathResolver.resolvePath(parsedJson, "/invalid/path") -+ -+ then: -+ resolvedValue == null -+ } -+} -diff --git a/instrumentation/aws-sdk/aws-sdk-2.2/testing/build.gradle.kts b/instrumentation/aws-sdk/aws-sdk-2.2/testing/build.gradle.kts -index 08b000a05c..de0fe82638 100644 ---- a/instrumentation/aws-sdk/aws-sdk-2.2/testing/build.gradle.kts -+++ b/instrumentation/aws-sdk/aws-sdk-2.2/testing/build.gradle.kts -@@ -20,6 +20,9 @@ dependencies { - compileOnly("software.amazon.awssdk:sqs:2.2.0") - compileOnly("software.amazon.awssdk:sns:2.2.0") - compileOnly("software.amazon.awssdk:ses:2.2.0") -+ compileOnly("software.amazon.awssdk:sfn:2.2.0") -+ compileOnly("software.amazon.awssdk:lambda:2.2.0") -+ compileOnly("software.amazon.awssdk:secretsmanager:2.2.0") - - // needed for SQS - using emq directly as localstack references emq v0.15.7 ie 
WITHOUT AWS trace header propagation - implementation("org.elasticmq:elasticmq-rest-sqs_2.13") -diff --git a/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/groovy/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2ClientCoreTest.groovy b/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/groovy/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2ClientCoreTest.groovy -index 9aaacb3abe..198990a509 100644 ---- a/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/groovy/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2ClientCoreTest.groovy -+++ b/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/groovy/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2ClientCoreTest.groovy -@@ -146,6 +146,8 @@ abstract class AbstractAws2ClientCoreTest extends InstrumentationSpecification { - "$RpcIncubatingAttributes.RPC_SYSTEM" "aws-api" - "$RpcIncubatingAttributes.RPC_SERVICE" "DynamoDb" - "$RpcIncubatingAttributes.RPC_METHOD" "CreateTable" -+ "aws.auth.account.access_key" "my-access-key" -+ "aws.auth.region" "ap-northeast-1" - "aws.agent" "java-aws-sdk" - "$AwsIncubatingAttributes.AWS_REQUEST_ID" "$requestId" - "aws.table.name" "sometable" -@@ -179,6 +181,8 @@ abstract class AbstractAws2ClientCoreTest extends InstrumentationSpecification { - "$RpcIncubatingAttributes.RPC_SYSTEM" "aws-api" - "$RpcIncubatingAttributes.RPC_SERVICE" "DynamoDb" - "$RpcIncubatingAttributes.RPC_METHOD" "Query" -+ "aws.auth.account.access_key" "my-access-key" -+ "aws.auth.region" "ap-northeast-1" - "aws.agent" "java-aws-sdk" - "$AwsIncubatingAttributes.AWS_REQUEST_ID" "$requestId" - "aws.table.name" "sometable" -@@ -211,6 +215,8 @@ abstract class AbstractAws2ClientCoreTest extends InstrumentationSpecification { - "$RpcIncubatingAttributes.RPC_SYSTEM" "aws-api" - "$RpcIncubatingAttributes.RPC_SERVICE" "$service" - "$RpcIncubatingAttributes.RPC_METHOD" "${operation}" -+ "aws.auth.account.access_key" "my-access-key" -+ "aws.auth.region" "ap-northeast-1" - "aws.agent" 
"java-aws-sdk" - "$AwsIncubatingAttributes.AWS_REQUEST_ID" "$requestId" - "aws.table.name" "sometable" -diff --git a/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/groovy/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2ClientTest.groovy b/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/groovy/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2ClientTest.groovy -index c571c0aa9c..a6fbdab597 100644 ---- a/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/groovy/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2ClientTest.groovy -+++ b/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/groovy/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2ClientTest.groovy -@@ -37,10 +37,19 @@ import software.amazon.awssdk.services.s3.model.GetObjectRequest - import software.amazon.awssdk.services.sns.SnsAsyncClient - import software.amazon.awssdk.services.sns.SnsClient - import software.amazon.awssdk.services.sns.model.PublishRequest -+import software.amazon.awssdk.services.sns.model.SubscribeRequest - import software.amazon.awssdk.services.sqs.SqsAsyncClient - import software.amazon.awssdk.services.sqs.SqsClient - import software.amazon.awssdk.services.sqs.model.CreateQueueRequest - import software.amazon.awssdk.services.sqs.model.SendMessageRequest -+import software.amazon.awssdk.services.sfn.SfnClient -+import software.amazon.awssdk.services.sfn.model.DescribeStateMachineRequest -+import software.amazon.awssdk.services.sfn.model.DescribeActivityRequest -+import software.amazon.awssdk.services.lambda.LambdaClient -+import software.amazon.awssdk.services.lambda.model.GetFunctionRequest -+import software.amazon.awssdk.services.lambda.model.GetEventSourceMappingRequest -+import software.amazon.awssdk.services.secretsmanager.SecretsManagerClient -+import software.amazon.awssdk.services.secretsmanager.model.GetSecretValueRequest - import spock.lang.Unroll - - import java.nio.charset.StandardCharsets -@@ -134,6 +143,8 @@ abstract class 
AbstractAws2ClientTest extends AbstractAws2ClientCoreTest { - "$RpcIncubatingAttributes.RPC_SYSTEM" "aws-api" - "$RpcIncubatingAttributes.RPC_SERVICE" "$service" - "$RpcIncubatingAttributes.RPC_METHOD" "${operation}" -+ "aws.auth.account.access_key" "my-access-key" -+ "aws.auth.region" "ap-northeast-1" - "aws.agent" "java-aws-sdk" - "$AwsIncubatingAttributes.AWS_REQUEST_ID" "$requestId" - if (service == "S3") { -@@ -148,8 +159,32 @@ abstract class AbstractAws2ClientTest extends AbstractAws2ClientCoreTest { - "$MessagingIncubatingAttributes.MESSAGING_SYSTEM" MessagingIncubatingAttributes.MessagingSystemIncubatingValues.AWS_SQS - } else if (service == "Kinesis") { - "aws.stream.name" "somestream" -- } else if (service == "Sns") { -- "$MessagingIncubatingAttributes.MESSAGING_DESTINATION_NAME" "somearn" -+ } else if (service == "Sns" && operation == "Publish") { -+ "$MessagingIncubatingAttributes.MESSAGING_DESTINATION_NAME" "sometargetarn" -+ } else if (service == "Sns" && operation == "Subscribe") { -+ "$MessagingIncubatingAttributes.MESSAGING_DESTINATION_NAME" "sometopicarn" -+ "aws.sns.topic.arn" "sometopicarn" -+ } else if (service == "Bedrock" && operation == "GetGuardrail") { -+ "aws.bedrock.guardrail.id" "guardrailId" -+ } else if (service == "BedrockAgent" && operation == "GetAgent") { -+ "aws.bedrock.agent.id" "agentId" -+ } else if (service == "BedrockAgent" && operation == "GetKnowledgeBase") { -+ "aws.bedrock.knowledge_base.id" "knowledgeBaseId" -+ } else if (service == "BedrockAgent" && operation == "GetDataSource") { -+ "aws.bedrock.data_source.id" "datasourceId" -+ } else if (service == "BedrockRuntime" && operation == "InvokeModel") { -+ "gen_ai.request.model" "meta.llama2-13b-chat-v1" -+ "gen_ai.system" "aws.bedrock" -+ } else if (service == "Sfn" && operation == "DescribeStateMachine") { -+ "aws.stepfunctions.state_machine.arn" "stateMachineArn" -+ } else if (service == "Sfn" && operation == "DescribeActivity") { -+ "aws.stepfunctions.activity.arn" 
"activityArn" -+ } else if (service == "Lambda" && operation == "GetFunction") { -+ "aws.lambda.function.name" "functionName" -+ } else if (service == "Lambda" && operation == "GetEventSourceMapping") { -+ "aws.lambda.resource_mapping.id" "sourceEventId" -+ } else if (service == "SecretsManager") { -+ "aws.secretsmanager.secret.arn" "someSecretArn" - } - } - } -@@ -164,7 +199,7 @@ abstract class AbstractAws2ClientTest extends AbstractAws2ClientCoreTest { - "S3" | "CreateBucket" | "PUT" | "UNKNOWN" | s3ClientBuilder() | { c -> c.createBucket(CreateBucketRequest.builder().bucket("somebucket").build()) } | "" - "S3" | "GetObject" | "GET" | "UNKNOWN" | s3ClientBuilder() | { c -> c.getObject(GetObjectRequest.builder().bucket("somebucket").key("somekey").build()) } | "" - "Kinesis" | "DeleteStream" | "POST" | "UNKNOWN" | KinesisClient.builder() | { c -> c.deleteStream(DeleteStreamRequest.builder().streamName("somestream").build()) } | "" -- "Sns" | "Publish" | "POST" | "d74b8436-ae13-5ab4-a9ff-ce54dfea72a0" | SnsClient.builder() | { c -> c.publish(PublishRequest.builder().message("somemessage").topicArn("somearn").build()) } | """ -+ "Sns" | "Publish" | "POST" | "d74b8436-ae13-5ab4-a9ff-ce54dfea72a0" | SnsClient.builder() | { c -> c.publish(PublishRequest.builder().message("somemessage").targetArn("sometargetarn").build()) } | """ - - - 567910cd-659e-55d4-8ccb-5aaf14679dc0 -@@ -174,15 +209,15 @@ abstract class AbstractAws2ClientTest extends AbstractAws2ClientCoreTest { - - - """ -- "Sns" | "Publish" | "POST" | "d74b8436-ae13-5ab4-a9ff-ce54dfea72a0" | SnsClient.builder() | { c -> c.publish(PublishRequest.builder().message("somemessage").targetArn("somearn").build()) } | """ -- -- -- 567910cd-659e-55d4-8ccb-5aaf14679dc0 -- -+ "Sns" | "Subscribe" | "POST" | "1234-5678-9101-1121" | SnsClient.builder() | { c -> c.subscribe(SubscribeRequest.builder().topicArn("sometopicarn").protocol("email").endpoint("test@example.com").build())} | """ -+ -+ -+ 
arn:aws:sns:us-west-2:123456789012:MyTopic:abc123 -+ - -- d74b8436-ae13-5ab4-a9ff-ce54dfea72a0 -+ 1234-5678-9101-1121 - -- -+ - """ - "Sqs" | "CreateQueue" | "POST" | "7a62c49f-347e-4fc4-9331-6e8e7a96aa73" | SqsClient.builder() | { c -> c.createQueue(CreateQueueRequest.builder().queueName("somequeue").build()) } | { - if (!Boolean.getBoolean("testLatestDeps")) { -@@ -244,170 +279,193 @@ abstract class AbstractAws2ClientTest extends AbstractAws2ClientCoreTest { - 0ac9cda2-bbf4-11d3-f92b-31fa5e8dbc99 - - """ -- } -- -- def "send #operation async request with builder #builder.class.getName() mocked response"() { -- assumeSupportedConfig(service, operation) -- setup: -- configureSdkClient(builder) -- def client = builder -- .endpointOverride(clientUri) -- .region(Region.AP_NORTHEAST_1) -- .credentialsProvider(CREDENTIALS_PROVIDER) -- .build() -- -- if (body instanceof Closure) { -- server.enqueue(body.call()) -- } else { -- server.enqueue(HttpResponse.of(HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, body)) -- } -- -- def response = call.call(client) -- if (response instanceof Future) { -- response = response.get() -- } -- -- expect: -- response != null -- -- assertTraces(1) { -- trace(0, 1) { -- span(0) { -- name operation != "SendMessage" ? "$service.$operation" : "somequeue publish" -- kind operation != "SendMessage" ? CLIENT : PRODUCER -- hasNoParent() -- attributes { -- if (service == "S3") { -- // Starting with AWS SDK V2 2.18.0, the s3 sdk will prefix the hostname with the bucket name in case -- // the bucket name is a valid DNS label, even in the case that we are using an endpoint override. -- // Previously the sdk was only doing that if endpoint had "s3" as label in the FQDN. -- // Our test assert both cases so that we don't need to know what version is being tested. 
-- "$ServerAttributes.SERVER_ADDRESS" { it == "somebucket.localhost" || it == "localhost" } -- "$UrlAttributes.URL_FULL" { it.startsWith("http://somebucket.localhost:${server.httpPort()}") || it.startsWith("http://localhost:${server.httpPort()}") } -- } else { -- "$ServerAttributes.SERVER_ADDRESS" "localhost" -- "$UrlAttributes.URL_FULL" { it == "http://localhost:${server.httpPort()}" || it == "http://localhost:${server.httpPort()}/" } -- } -- "$ServerAttributes.SERVER_PORT" server.httpPort() -- "$HttpAttributes.HTTP_REQUEST_METHOD" "$method" -- "$HttpAttributes.HTTP_RESPONSE_STATUS_CODE" 200 -- "$RpcIncubatingAttributes.RPC_SYSTEM" "aws-api" -- "$RpcIncubatingAttributes.RPC_SERVICE" "$service" -- "$RpcIncubatingAttributes.RPC_METHOD" "${operation}" -- "aws.agent" "java-aws-sdk" -- "$AwsIncubatingAttributes.AWS_REQUEST_ID" "$requestId" -- if (service == "S3") { -- "aws.bucket.name" "somebucket" -- } else if (service == "Sqs" && operation == "CreateQueue") { -- "aws.queue.name" "somequeue" -- } else if (service == "Sqs" && operation == "SendMessage") { -- "aws.queue.url" QUEUE_URL -- "$MessagingIncubatingAttributes.MESSAGING_DESTINATION_NAME" "somequeue" -- "$MessagingIncubatingAttributes.MESSAGING_OPERATION" "publish" -- "$MessagingIncubatingAttributes.MESSAGING_MESSAGE_ID" String -- "$MessagingIncubatingAttributes.MESSAGING_SYSTEM" MessagingIncubatingAttributes.MessagingSystemIncubatingValues.AWS_SQS -- } else if (service == "Kinesis") { -- "aws.stream.name" "somestream" -- } else if (service == "Sns") { -- "$MessagingIncubatingAttributes.MESSAGING_DESTINATION_NAME" "somearn" -- } -- } -- } -- } -- } -- def request = server.takeRequest() -- request.request().headers().get("X-Amzn-Trace-Id") != null -- request.request().headers().get("traceparent") == null -- -- if (service == "Sns" && operation == "Publish") { -- def content = request.request().content().toStringUtf8() -- def containsId = content.contains("${traces[0][0].traceId}-${traces[0][0].spanId}") -- def 
containsTp = content.contains("=traceparent") -- if (isSqsAttributeInjectionEnabled()) { -- assert containsId && containsTp -- } else { -- assert !containsId && !containsTp -- } -- } -- -- where: -- service | operation | method | requestId | builder | call | body -- "S3" | "CreateBucket" | "PUT" | "UNKNOWN" | s3AsyncClientBuilder() | { c -> c.createBucket(CreateBucketRequest.builder().bucket("somebucket").build()) } | "" -- "S3" | "GetObject" | "GET" | "UNKNOWN" | s3AsyncClientBuilder() | { c -> c.getObject(GetObjectRequest.builder().bucket("somebucket").key("somekey").build(), AsyncResponseTransformer.toBytes()) } | "1234567890" -- // Kinesis seems to expect an http2 response which is incompatible with our test server. -- // "Kinesis" | "DeleteStream" | "POST" | "/" | "UNKNOWN" | KinesisAsyncClient.builder() | { c -> c.deleteStream(DeleteStreamRequest.builder().streamName("somestream").build()) } | "" -- "Sqs" | "CreateQueue" | "POST" | "7a62c49f-347e-4fc4-9331-6e8e7a96aa73" | SqsAsyncClient.builder() | { c -> c.createQueue(CreateQueueRequest.builder().queueName("somequeue").build()) } | { -- if (!Boolean.getBoolean("testLatestDeps")) { -- def content = """ -- -- https://queue.amazonaws.com/123456789012/MyQueue -- 7a62c49f-347e-4fc4-9331-6e8e7a96aa73 -- -- """ -- return HttpResponse.of(HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, content) -- } -- def content = """ -- { -- "QueueUrl":"https://queue.amazonaws.com/123456789012/MyQueue" -- } -- """ -- ResponseHeaders headers = ResponseHeaders.builder(HttpStatus.OK) -- .contentType(MediaType.PLAIN_TEXT_UTF_8) -- .add("x-amzn-RequestId", "7a62c49f-347e-4fc4-9331-6e8e7a96aa73") -- .build() -- return HttpResponse.of(headers, HttpData.of(StandardCharsets.UTF_8, content)) -- } -- "Sqs" | "SendMessage" | "POST" | "27daac76-34dd-47df-bd01-1f6e873584a0" | SqsAsyncClient.builder() | { c -> c.sendMessage(SendMessageRequest.builder().queueUrl(QUEUE_URL).messageBody("").build()) } | { -- if (!Boolean.getBoolean("testLatestDeps")) { 
-- def content = """ -- -- -- d41d8cd98f00b204e9800998ecf8427e -- 3ae8f24a165a8cedc005670c81a27295 -- 5fea7756-0ea4-451a-a703-a558b933e274 -- -- 27daac76-34dd-47df-bd01-1f6e873584a0 -- -- """ -- return HttpResponse.of(HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, content) -+ "Sfn" | "DescribeStateMachine" | "POST" | "UNKNOWN" | SfnClient.builder() -+ | { c -> c.describeStateMachine(DescribeStateMachineRequest.builder().stateMachineArn("stateMachineArn").build()) } -+ | "" -+ "Sfn" | "DescribeActivity" | "POST" | "UNKNOWN" | SfnClient.builder() -+ | { c -> c.describeActivity(DescribeActivityRequest.builder().activityArn("activityArn").build()) } -+ | "" -+ "Lambda" | "GetFunction" | "GET" | "UNKNOWN" | LambdaClient.builder() -+ | { c -> c.getFunction(GetFunctionRequest.builder().functionName("functionName").build()) } -+ | "" -+ "Lambda" | "GetEventSourceMapping" | "GET" |"UNKNOWN" | LambdaClient.builder() -+ | { c -> c.getEventSourceMapping(GetEventSourceMappingRequest.builder().uuid("sourceEventId").build()) } -+ | "" -+ "SecretsManager" | "GetSecretValue" | "POST" | "UNKNOWN" | SecretsManagerClient.builder() -+ | { c -> c.getSecretValue(GetSecretValueRequest.builder().secretId("someSecret1").build()) } -+ | """ -+ { -+ "ARN":"someSecretArn", -+ "CreatedDate":1.523477145713E9, -+ "Name":"MyTestDatabaseSecret", -+ "SecretString":"{\\n \\"username\\":\\"david\\",\\n \\"password\\":\\"EXAMPLE-PASSWORD\\"\\n}\\n", -+ "VersionId":"EXAMPLE1-90ab-cdef-fedc-ba987SECRET1" - } -- def content = """ -- { -- "MD5OfMessageBody":"d41d8cd98f00b204e9800998ecf8427e", -- "MD5OfMessageAttributes":"3ae8f24a165a8cedc005670c81a27295", -- "MessageId":"5fea7756-0ea4-451a-a703-a558b933e274" -- } -- """ -- ResponseHeaders headers = ResponseHeaders.builder(HttpStatus.OK) -- .contentType(MediaType.PLAIN_TEXT_UTF_8) -- .add("x-amzn-RequestId", "27daac76-34dd-47df-bd01-1f6e873584a0") -- .build() -- return HttpResponse.of(headers, HttpData.of(StandardCharsets.UTF_8, content)) -- } -- "Ec2" | 
"AllocateAddress" | "POST" | "59dbff89-35bd-4eac-99ed-be587EXAMPLE" | Ec2AsyncClient.builder() | { c -> c.allocateAddress() } | """ -- -- 59dbff89-35bd-4eac-99ed-be587EXAMPLE -- 192.0.2.1 -- standard -- -- """ -- "Rds" | "DeleteOptionGroup" | "POST" | "0ac9cda2-bbf4-11d3-f92b-31fa5e8dbc99" | RdsAsyncClient.builder() | { c -> c.deleteOptionGroup(DeleteOptionGroupRequest.builder().build()) } | """ -- -- 0ac9cda2-bbf4-11d3-f92b-31fa5e8dbc99 -- -- """ -- "Sns" | "Publish" | "POST" | "f187a3c1-376f-11df-8963-01868b7c937a" | SnsAsyncClient.builder() | { SnsAsyncClient c -> c.publish(r -> r.message("hello").topicArn("somearn")) } | """ -- -- -- 94f20ce6-13c5-43a0-9a9e-ca52d816e90b -- -- -- f187a3c1-376f-11df-8963-01868b7c937a -- -- -- """ -+ """ - } - -+// def "send #operation async request with builder #builder.class.getName() mocked response"() { -+// assumeSupportedConfig(service, operation) -+// setup: -+// configureSdkClient(builder) -+// def client = builder -+// .endpointOverride(clientUri) -+// .region(Region.AP_NORTHEAST_1) -+// .credentialsProvider(CREDENTIALS_PROVIDER) -+// .build() -+// -+// if (body instanceof Closure) { -+// server.enqueue(body.call()) -+// } else { -+// server.enqueue(HttpResponse.of(HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, body)) -+// } -+// -+// def response = call.call(client) -+// if (response instanceof Future) { -+// response = response.get() -+// } -+// -+// expect: -+// response != null -+// -+// assertTraces(1) { -+// trace(0, 1) { -+// span(0) { -+// name operation != "SendMessage" ? "$service.$operation" : "somequeue publish" -+// kind operation != "SendMessage" ? CLIENT : PRODUCER -+// hasNoParent() -+// attributes { -+// if (service == "S3") { -+// // Starting with AWS SDK V2 2.18.0, the s3 sdk will prefix the hostname with the bucket name in case -+// // the bucket name is a valid DNS label, even in the case that we are using an endpoint override. 
-+// // Previously the sdk was only doing that if endpoint had "s3" as label in the FQDN. -+// // Our test assert both cases so that we don't need to know what version is being tested. -+// "$ServerAttributes.SERVER_ADDRESS" { it == "somebucket.localhost" || it == "localhost" } -+// "$UrlAttributes.URL_FULL" { it.startsWith("http://somebucket.localhost:${server.httpPort()}") || it.startsWith("http://localhost:${server.httpPort()}") } -+// } else { -+// "$ServerAttributes.SERVER_ADDRESS" "localhost" -+// "$UrlAttributes.URL_FULL" { it == "http://localhost:${server.httpPort()}" || it == "http://localhost:${server.httpPort()}/" } -+// } -+// "$ServerAttributes.SERVER_PORT" server.httpPort() -+// "$HttpAttributes.HTTP_REQUEST_METHOD" "$method" -+// "$HttpAttributes.HTTP_RESPONSE_STATUS_CODE" 200 -+// "$RpcIncubatingAttributes.RPC_SYSTEM" "aws-api" -+// "$RpcIncubatingAttributes.RPC_SERVICE" "$service" -+// "$RpcIncubatingAttributes.RPC_METHOD" "${operation}" -+// "aws.agent" "java-aws-sdk" -+// "$AwsIncubatingAttributes.AWS_REQUEST_ID" "$requestId" -+// if (service == "S3") { -+// "aws.bucket.name" "somebucket" -+// } else if (service == "Sqs" && operation == "CreateQueue") { -+// "aws.queue.name" "somequeue" -+// } else if (service == "Sqs" && operation == "SendMessage") { -+// "aws.queue.url" QUEUE_URL -+// "$MessagingIncubatingAttributes.MESSAGING_DESTINATION_NAME" "somequeue" -+// "$MessagingIncubatingAttributes.MESSAGING_OPERATION" "publish" -+// "$MessagingIncubatingAttributes.MESSAGING_MESSAGE_ID" String -+// "$MessagingIncubatingAttributes.MESSAGING_SYSTEM" MessagingIncubatingAttributes.MessagingSystemIncubatingValues.AWS_SQS -+// } else if (service == "Kinesis") { -+// "aws.stream.name" "somestream" -+// } else if (service == "Sns") { -+// "$MessagingIncubatingAttributes.MESSAGING_DESTINATION_NAME" "somearn" -+// } -+// } -+// } -+// } -+// } -+// def request = server.takeRequest() -+// request.request().headers().get("X-Amzn-Trace-Id") != null -+// 
request.request().headers().get("traceparent") == null -+// -+// if (service == "Sns" && operation == "Publish") { -+// def content = request.request().content().toStringUtf8() -+// def containsId = content.contains("${traces[0][0].traceId}-${traces[0][0].spanId}") -+// def containsTp = content.contains("=traceparent") -+// if (isSqsAttributeInjectionEnabled()) { -+// assert containsId && containsTp -+// } else { -+// assert !containsId && !containsTp -+// } -+// } -+// -+// where: -+// service | operation | method | requestId | builder | call | body -+// "S3" | "CreateBucket" | "PUT" | "UNKNOWN" | s3AsyncClientBuilder() | { c -> c.createBucket(CreateBucketRequest.builder().bucket("somebucket").build()) } | "" -+// "S3" | "GetObject" | "GET" | "UNKNOWN" | s3AsyncClientBuilder() | { c -> c.getObject(GetObjectRequest.builder().bucket("somebucket").key("somekey").build(), AsyncResponseTransformer.toBytes()) } | "1234567890" -+// // Kinesis seems to expect an http2 response which is incompatible with our test server. 
-+// // "Kinesis" | "DeleteStream" | "POST" | "/" | "UNKNOWN" | KinesisAsyncClient.builder() | { c -> c.deleteStream(DeleteStreamRequest.builder().streamName("somestream").build()) } | "" -+// "Sqs" | "CreateQueue" | "POST" | "7a62c49f-347e-4fc4-9331-6e8e7a96aa73" | SqsAsyncClient.builder() | { c -> c.createQueue(CreateQueueRequest.builder().queueName("somequeue").build()) } | { -+// if (!Boolean.getBoolean("testLatestDeps")) { -+// def content = """ -+// -+// https://queue.amazonaws.com/123456789012/MyQueue -+// 7a62c49f-347e-4fc4-9331-6e8e7a96aa73 -+// -+// """ -+// return HttpResponse.of(HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, content) -+// } -+// def content = """ -+// { -+// "QueueUrl":"https://queue.amazonaws.com/123456789012/MyQueue" -+// } -+// """ -+// ResponseHeaders headers = ResponseHeaders.builder(HttpStatus.OK) -+// .contentType(MediaType.PLAIN_TEXT_UTF_8) -+// .add("x-amzn-RequestId", "7a62c49f-347e-4fc4-9331-6e8e7a96aa73") -+// .build() -+// return HttpResponse.of(headers, HttpData.of(StandardCharsets.UTF_8, content)) -+// } -+// "Sqs" | "SendMessage" | "POST" | "27daac76-34dd-47df-bd01-1f6e873584a0" | SqsAsyncClient.builder() | { c -> c.sendMessage(SendMessageRequest.builder().queueUrl(QUEUE_URL).messageBody("").build()) } | { -+// if (!Boolean.getBoolean("testLatestDeps")) { -+// def content = """ -+// -+// -+// d41d8cd98f00b204e9800998ecf8427e -+// 3ae8f24a165a8cedc005670c81a27295 -+// 5fea7756-0ea4-451a-a703-a558b933e274 -+// -+// 27daac76-34dd-47df-bd01-1f6e873584a0 -+// -+// """ -+// return HttpResponse.of(HttpStatus.OK, MediaType.PLAIN_TEXT_UTF_8, content) -+// } -+// def content = """ -+// { -+// "MD5OfMessageBody":"d41d8cd98f00b204e9800998ecf8427e", -+// "MD5OfMessageAttributes":"3ae8f24a165a8cedc005670c81a27295", -+// "MessageId":"5fea7756-0ea4-451a-a703-a558b933e274" -+// } -+// """ -+// ResponseHeaders headers = ResponseHeaders.builder(HttpStatus.OK) -+// .contentType(MediaType.PLAIN_TEXT_UTF_8) -+// .add("x-amzn-RequestId", 
"27daac76-34dd-47df-bd01-1f6e873584a0") -+// .build() -+// return HttpResponse.of(headers, HttpData.of(StandardCharsets.UTF_8, content)) -+// } -+// "Ec2" | "AllocateAddress" | "POST" | "59dbff89-35bd-4eac-99ed-be587EXAMPLE" | Ec2AsyncClient.builder() | { c -> c.allocateAddress() } | """ -+// -+// 59dbff89-35bd-4eac-99ed-be587EXAMPLE -+// 192.0.2.1 -+// standard -+// -+// """ -+// "Rds" | "DeleteOptionGroup" | "POST" | "0ac9cda2-bbf4-11d3-f92b-31fa5e8dbc99" | RdsAsyncClient.builder() | { c -> c.deleteOptionGroup(DeleteOptionGroupRequest.builder().build()) } | """ -+// -+// 0ac9cda2-bbf4-11d3-f92b-31fa5e8dbc99 -+// -+// """ -+// "Sns" | "Publish" | "POST" | "f187a3c1-376f-11df-8963-01868b7c937a" | SnsAsyncClient.builder() | { SnsAsyncClient c -> c.publish(r -> r.message("hello").topicArn("somearn")) } | """ -+// -+// -+// 94f20ce6-13c5-43a0-9a9e-ca52d816e90b -+// -+// -+// f187a3c1-376f-11df-8963-01868b7c937a -+// -+// -+// """ -+// } -+ - // TODO: Without AOP instrumentation of the HTTP client, we cannot model retries as - // spans because of https://github.com/aws/aws-sdk-java-v2/issues/1741. We should at least tweak - // the instrumentation to add Events for retries instead. 
-@@ -457,6 +515,8 @@ abstract class AbstractAws2ClientTest extends AbstractAws2ClientCoreTest { - "$RpcIncubatingAttributes.RPC_SYSTEM" "aws-api" - "$RpcIncubatingAttributes.RPC_SERVICE" "S3" - "$RpcIncubatingAttributes.RPC_METHOD" "GetObject" -+ "aws.auth.account.access_key" "my-access-key" -+ "aws.auth.region" "ap-northeast-1" - "aws.agent" "java-aws-sdk" - "aws.bucket.name" "somebucket" - } -diff --git a/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2ClientRecordHttpErrorTest.java b/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2ClientRecordHttpErrorTest.java -index 73d2a0ba82..f46361a078 100644 ---- a/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2ClientRecordHttpErrorTest.java -+++ b/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2ClientRecordHttpErrorTest.java -@@ -172,6 +172,8 @@ public abstract class AbstractAws2ClientRecordHttpErrorTest { - span.hasKind(SpanKind.CLIENT); - span.hasNoParent(); - span.hasAttributesSatisfyingExactly( -+ equalTo(stringKey("aws.auth.account.access_key"), "my-access-key"), -+ equalTo(stringKey("aws.auth.region"), "ap-northeast-1"), - equalTo(SERVER_ADDRESS, "127.0.0.1"), - equalTo(SERVER_PORT, server.httpPort()), - equalTo(HTTP_REQUEST_METHOD, method), -diff --git a/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2SqsBaseTest.java b/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2SqsBaseTest.java -index 902bfdc0d4..756968776e 100644 ---- a/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2SqsBaseTest.java -+++ 
b/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2SqsBaseTest.java -@@ -214,6 +214,8 @@ public abstract class AbstractAws2SqsBaseTest { - equalTo(RPC_SYSTEM, "aws-api"), - equalTo(RPC_SERVICE, "Sqs"), - equalTo(RPC_METHOD, "CreateQueue"), -+ equalTo(stringKey("aws.auth.account.access_key"), "my-access-key"), -+ equalTo(stringKey("aws.auth.region"), "ap-northeast-1"), - equalTo(HTTP_REQUEST_METHOD, "POST"), - equalTo(HTTP_RESPONSE_STATUS_CODE, 200), - satisfies(URL_FULL, v -> v.startsWith("http://localhost:" + sqsPort)), -@@ -257,6 +259,8 @@ public abstract class AbstractAws2SqsBaseTest { - equalTo(RPC_SYSTEM, "aws-api"), - equalTo(RPC_SERVICE, "Sqs"), - equalTo(RPC_METHOD, rcpMethod), -+ equalTo(stringKey("aws.auth.account.access_key"), "my-access-key"), -+ equalTo(stringKey("aws.auth.region"), "ap-northeast-1"), - equalTo(HTTP_REQUEST_METHOD, "POST"), - equalTo(HTTP_RESPONSE_STATUS_CODE, 200), - satisfies(URL_FULL, v -> v.startsWith("http://localhost:" + sqsPort)), -diff --git a/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2SqsSuppressReceiveSpansTest.java b/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2SqsSuppressReceiveSpansTest.java -index 4d0a9be89c..382c035bf5 100644 ---- a/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2SqsSuppressReceiveSpansTest.java -+++ b/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2SqsSuppressReceiveSpansTest.java -@@ -84,6 +84,8 @@ public abstract class AbstractAws2SqsSuppressReceiveSpansTest extends AbstractAw - equalTo(RPC_METHOD, "ReceiveMessage"), - equalTo(HTTP_REQUEST_METHOD, "POST"), - equalTo(HTTP_RESPONSE_STATUS_CODE, 200), -+ equalTo(stringKey("aws.auth.account.access_key"), 
"my-access-key"), -+ equalTo(stringKey("aws.auth.region"), "ap-northeast-1"), - satisfies(URL_FULL, v -> v.startsWith("http://localhost:" + sqsPort)), - equalTo(SERVER_ADDRESS, "localhost"), - equalTo(SERVER_PORT, sqsPort)))); -diff --git a/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2SqsTracingTest.java b/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2SqsTracingTest.java -index 6fa897d462..f7ac28762c 100644 ---- a/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2SqsTracingTest.java -+++ b/instrumentation/aws-sdk/aws-sdk-2.2/testing/src/main/java/io/opentelemetry/instrumentation/awssdk/v2_2/AbstractAws2SqsTracingTest.java -@@ -80,6 +80,9 @@ public abstract class AbstractAws2SqsTracingTest extends AbstractAws2SqsBaseTest - equalTo(RPC_METHOD, "SendMessage"), - equalTo(HTTP_REQUEST_METHOD, "POST"), - equalTo(HTTP_RESPONSE_STATUS_CODE, 200), -+ equalTo( -+ stringKey("aws.auth.account.access_key"), "my-access-key"), -+ equalTo(stringKey("aws.auth.region"), "ap-northeast-1"), - satisfies( - URL_FULL, v -> v.startsWith("http://localhost:" + sqsPort)), - equalTo(SERVER_ADDRESS, "localhost"), -@@ -133,6 +136,9 @@ public abstract class AbstractAws2SqsTracingTest extends AbstractAws2SqsBaseTest - equalTo(RPC_METHOD, "ReceiveMessage"), - equalTo(HTTP_REQUEST_METHOD, "POST"), - equalTo(HTTP_RESPONSE_STATUS_CODE, 200), -+ equalTo( -+ stringKey("aws.auth.account.access_key"), "my-access-key"), -+ equalTo(stringKey("aws.auth.region"), "ap-northeast-1"), - satisfies( - URL_FULL, v -> v.startsWith("http://localhost:" + sqsPort)), - equalTo(SERVER_ADDRESS, "localhost"), -diff --git a/instrumentation/camel-2.20/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/apachecamel/aws/AwsSpanAssertions.java 
b/instrumentation/camel-2.20/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/apachecamel/aws/AwsSpanAssertions.java -index 8731717005..0d59b40f5e 100644 ---- a/instrumentation/camel-2.20/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/apachecamel/aws/AwsSpanAssertions.java -+++ b/instrumentation/camel-2.20/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/apachecamel/aws/AwsSpanAssertions.java -@@ -94,7 +94,8 @@ class AwsSpanAssertions { - equalTo(NETWORK_PROTOCOL_VERSION, "1.1"), - equalTo(RPC_SYSTEM, "aws-api"), - satisfies(RPC_METHOD, stringAssert -> stringAssert.isEqualTo(rpcMethod)), -- equalTo(RPC_SERVICE, "AmazonSQS"))); -+ equalTo(RPC_SERVICE, "AmazonSQS"), -+ equalTo(stringKey("aws.auth.account.access_key"), "x"))); - - if (spanName.endsWith("receive") - || spanName.endsWith("process") -diff --git a/version.gradle.kts b/version.gradle.kts -index a1cae43b4b..c1520e9947 100644 ---- a/version.gradle.kts -+++ b/version.gradle.kts -@@ -1,5 +1,5 @@ --val stableVersion = "2.11.0" --val alphaVersion = "2.11.0-alpha" -+val stableVersion = "2.11.0-adot1" -+val alphaVersion = "2.11.0-adot1-alpha" - - allprojects { - if (findProperty("otel.stable") != "true") { diff --git a/.github/scripts/patch.sh b/.github/scripts/patch.sh index 7bbfc7356a..b6a6bba94e 100755 --- a/.github/scripts/patch.sh +++ b/.github/scripts/patch.sh @@ -6,7 +6,6 @@ set -x -e -u # This is used so that we can properly clone the upstream repositories. # This file should define the following variables: # OTEL_JAVA_VERSION. Tag of the opentelemetry-java repository to use. E.g.: JAVA_OTEL_JAVA_VERSION=v1.21.0 -# OTEL_JAVA_INSTRUMENTATION_VERSION. Tag of the opentelemetry-java-instrumentation repository to use, e.g.: OTEL_JAVA_INSTRUMENTATION_VERSION=v1.21.0 # OTEL_JAVA_CONTRIB_VERSION. Tag of the opentelemetry-java-contrib repository. 
E.g.: OTEL_JAVA_CONTRIB_VERSION=v1.21.0 # This script will fail if a variable that is supposed to exist is referenced. @@ -45,16 +44,3 @@ if [[ -f "$OTEL_JAVA_CONTRIB_PATCH" ]]; then else echo "Skipping patching opentelemetry-java-contrib" fi - - -OTEL_JAVA_INSTRUMENTATION_PATCH=".github/patches/opentelemetry-java-instrumentation.patch" -if [[ -f "$OTEL_JAVA_INSTRUMENTATION_PATCH" ]]; then - git clone https://github.com/open-telemetry/opentelemetry-java-instrumentation.git - cd opentelemetry-java-instrumentation - git checkout ${OTEL_JAVA_INSTRUMENTATION_VERSION} -b tag-${OTEL_JAVA_INSTRUMENTATION_VERSION} - patch -p1 < "../${OTEL_JAVA_INSTRUMENTATION_PATCH}" - git commit -a -m "ADOT Patch release" - cd - -else - echo "Skipping patching opentelemetry-java-instrumentation" -fi diff --git a/dependencyManagement/build.gradle.kts b/dependencyManagement/build.gradle.kts index 6bdf78030b..7bb24e3543 100644 --- a/dependencyManagement/build.gradle.kts +++ b/dependencyManagement/build.gradle.kts @@ -27,7 +27,7 @@ data class DependencySet(val group: String, val version: String, val modules: Li val testSnapshots = rootProject.findProperty("testUpstreamSnapshots") == "true" // This is the version of the upstream instrumentation BOM -val otelVersion = "2.11.0-adot1" +val otelVersion = "2.11.0" val otelSnapshotVersion = "2.12.0" val otelAlphaVersion = if (!testSnapshots) "$otelVersion-alpha" else "$otelSnapshotVersion-alpha-SNAPSHOT" val otelJavaAgentVersion = if (!testSnapshots) otelVersion else "$otelSnapshotVersion-SNAPSHOT" diff --git a/instrumentation/aws-sdk/README.md b/instrumentation/aws-sdk/README.md index 1b4d677d3e..9ac6c79290 100644 --- a/instrumentation/aws-sdk/README.md +++ b/instrumentation/aws-sdk/README.md @@ -152,12 +152,32 @@ _Class Functionalities:_ ### Commands for Running Groovy Tests -To run the BedrockJsonParserTest for aws-sdk v1.11: +#### aws-sdk v1.11 +To run the `BedrockJsonParserTest`: ```` ./gradlew :instrumentation:aws-sdk:test --tests 
"software.amazon.opentelemetry.javaagent.instrumentation.awssdk_v1_11.BedrockJsonParserTest" ```` -To run the BedrockJsonParserTest for aws-sdk v2.2: +#### aws-sdk v2.2 +To run the `BedrockJsonParserTest`: ```` ./gradlew :instrumentation:aws-sdk:test --tests "software.amazon.opentelemetry.javaagent.instrumentation.awssdk_v2_2.BedrockJsonParserTest" -```` \ No newline at end of file +```` + +### Commands for Running Java Tests + +#### aws-sdk v1.11 +To run the `AwsSdkExperimentalAttributesInjectionTest`: +```` +./gradlew :instrumentation:aws-sdk:test --tests "software.amazon.opentelemetry.javaagent.instrumentation.awssdk_v1_11.AwsSdkExperimentalAttributesInjectionTest" +```` + +To run the `AdotAwsSdkClientAdviceTest`: +```` +./gradlew :instrumentation:aws-sdk:test --tests "software.amazon.opentelemetry.javaagent.instrumentation.awssdk_v1_11.AdotAwsSdkClientAdviceTest" +```` + +#### aws-sdk v2.2 +To run the `AwsSdkExperimentalAttributesInjectionTest`: +```` +./gradlew :instrumentation:aws-sdk:test --tests "software.amazon.opentelemetry.javaagent.instrumentation.awssdk_v2_2.AwsSdkExperimentalAttributesInjectionTest" \ No newline at end of file diff --git a/instrumentation/aws-sdk/build.gradle.kts b/instrumentation/aws-sdk/build.gradle.kts index 5863df2a10..101e966a12 100644 --- a/instrumentation/aws-sdk/build.gradle.kts +++ b/instrumentation/aws-sdk/build.gradle.kts @@ -28,16 +28,17 @@ dependencies { compileOnly("com.amazonaws:aws-java-sdk-core:1.11.0") compileOnly("software.amazon.awssdk:aws-core:2.2.0") compileOnly("software.amazon.awssdk:aws-json-protocol:2.2.0") - compileOnly("net.bytebuddy:byte-buddy") - compileOnly("com.google.code.findbugs:jsr305:3.0.2") testImplementation("com.google.guava:guava") - testImplementation("io.opentelemetry.javaagent:opentelemetry-testing-common") - - testImplementation("com.amazonaws:aws-java-sdk-core:1.11.0") - testImplementation("io.opentelemetry.javaagent:opentelemetry-javaagent-extension-api") 
testImplementation("org.mockito:mockito-core:5.14.2") - testImplementation("com.google.guava:guava") testImplementation("io.opentelemetry.javaagent:opentelemetry-testing-common") + testImplementation("io.opentelemetry.javaagent:opentelemetry-javaagent-extension-api") + + testImplementation("software.amazon.awssdk:aws-core:2.2.0") + testImplementation("com.amazonaws:aws-java-sdk-lambda:1.11.678") + testImplementation("com.amazonaws:aws-java-sdk-kinesis:1.11.106") + testImplementation("com.amazonaws:aws-java-sdk-sns:1.11.106") + testImplementation("com.amazonaws:aws-java-sdk-stepfunctions:1.11.230") + testImplementation("com.amazonaws:aws-java-sdk-secretsmanager:1.11.309") } diff --git a/instrumentation/aws-sdk/src/test/java/software/amazon/opentelemetry/javaagent/instrumentation/awssdk_v1_11/AwsSdkExperimentalAttributesInjectionTest.java b/instrumentation/aws-sdk/src/test/java/software/amazon/opentelemetry/javaagent/instrumentation/awssdk_v1_11/AwsSdkExperimentalAttributesInjectionTest.java new file mode 100644 index 0000000000..d019842ce1 --- /dev/null +++ b/instrumentation/aws-sdk/src/test/java/software/amazon/opentelemetry/javaagent/instrumentation/awssdk_v1_11/AwsSdkExperimentalAttributesInjectionTest.java @@ -0,0 +1,220 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + +package software.amazon.opentelemetry.javaagent.instrumentation.awssdk_v1_11; + +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.*; + +import com.amazonaws.Request; +import com.amazonaws.Response; +import com.amazonaws.auth.AWSCredentials; +import com.amazonaws.handlers.HandlerContextKey; +import com.amazonaws.services.kinesis.model.PutRecordRequest; +import com.amazonaws.services.lambda.model.CreateFunctionRequest; +import com.amazonaws.services.lambda.model.FunctionConfiguration; +import com.amazonaws.services.lambda.model.GetFunctionResult; +import com.amazonaws.services.secretsmanager.model.GetSecretValueRequest; +import com.amazonaws.services.sns.model.PublishRequest; +import com.amazonaws.services.stepfunctions.model.StartExecutionRequest; +import io.opentelemetry.api.common.AttributesBuilder; +import io.opentelemetry.context.Context; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +/* + * NOTE: V1.11 attribute extraction is difficult to test in unit tests due to reflection-based + * method access via MethodHandle. Many tests here only verify that the extractor correctly + * identifies different AWS service types rather than actual attribute extraction. However, these + * attributes are comprehensively tested in the contract tests which provide end-to-end validation + * of the reflection-based extraction logic. The contract tests cover most V1.11 attributes + * including all Bedrock Gen AI attributes. 
+ */ +class AwsSdkExperimentalAttributesInjectionTest { + + private AwsSdkExperimentalAttributesExtractor extractor; + private AttributesBuilder attributes; + private Request mockRequest; + private Response mockResponse; + private static final HandlerContextKey AWS_CREDENTIALS = + new HandlerContextKey<>("AWSCredentials"); + + @BeforeEach + void setUp() { + extractor = new AwsSdkExperimentalAttributesExtractor(); + attributes = mock(AttributesBuilder.class); + mockRequest = mock(Request.class); + mockResponse = mock(Response.class); + } + + @Test + void testSnsExperimentalAttributes() { + PublishRequest snsRequest = mock(PublishRequest.class); + when(mockRequest.getServiceName()).thenReturn("AmazonSNS"); + when(mockRequest.getOriginalRequest()).thenReturn(snsRequest); + when(snsRequest.getTopicArn()).thenReturn("arn:aws:sns:region:account:topic/test"); + + extractor.onStart(attributes, Context.current(), mockRequest); + + verify(attributes) + .put( + eq(AwsExperimentalAttributes.AWS_SNS_TOPIC_ARN), + eq("arn:aws:sns:region:account:topic/test")); + } + + @Test + void testKinesisExperimentalAttributes() { + PutRecordRequest kinesisRequest = mock(PutRecordRequest.class); + when(mockRequest.getServiceName()).thenReturn("AmazonKinesis"); + when(mockRequest.getOriginalRequest()).thenReturn(kinesisRequest); + when(kinesisRequest.getStreamARN()).thenReturn("arn:aws:kinesis:region:account:stream/test"); + + extractor.onStart(attributes, Context.current(), mockRequest); + + verify(attributes) + .put( + eq(AwsExperimentalAttributes.AWS_STREAM_ARN), + eq("arn:aws:kinesis:region:account:stream/test")); + } + + @Test + void testStepFunctionsExperimentalAttributes() { + StartExecutionRequest sfnRequest = mock(StartExecutionRequest.class); + when(mockRequest.getServiceName()).thenReturn("AWSStepFunctions"); + when(mockRequest.getOriginalRequest()).thenReturn(sfnRequest); + when(sfnRequest.getStateMachineArn()) + .thenReturn("arn:aws:states:region:account:stateMachine/test"); + + 
extractor.onStart(attributes, Context.current(), mockRequest); + + verify(attributes) + .put( + eq(AwsExperimentalAttributes.AWS_STATE_MACHINE_ARN), + eq("arn:aws:states:region:account:stateMachine/test")); + } + + @Test + void testAuthAccessKeyAttributes() { + AWSCredentials credentials = mock(AWSCredentials.class); + when(mockRequest.getHandlerContext(AWS_CREDENTIALS)).thenReturn(credentials); + when(credentials.getAWSAccessKeyId()).thenReturn("AKIAIOSFODNN7EXAMPLE"); + when(mockRequest.getOriginalRequest()).thenReturn(mock(PublishRequest.class)); + when(mockRequest.getServiceName()).thenReturn("AmazonSNS"); + + extractor.onStart(attributes, Context.current(), mockRequest); + + verify(attributes) + .put(eq(AwsExperimentalAttributes.AWS_AUTH_ACCESS_KEY), eq("AKIAIOSFODNN7EXAMPLE")); + } + + @Test + void testSecretsManagerExperimentalAttributes() { + GetSecretValueRequest secretRequest = mock(GetSecretValueRequest.class); + when(mockRequest.getServiceName()).thenReturn("AWSSecretsManager"); + when(mockRequest.getOriginalRequest()).thenReturn(secretRequest); + + extractor.onStart(attributes, Context.current(), mockRequest); + // We're not verifying anything here since the actual attribute setting depends on reflection + } + + @Test + void testLambdaNameExperimentalAttributes() { + CreateFunctionRequest lambdaRequest = mock(CreateFunctionRequest.class); + when(mockRequest.getServiceName()).thenReturn("AWSLambda"); + when(mockRequest.getOriginalRequest()).thenReturn(lambdaRequest); + when(lambdaRequest.getFunctionName()).thenReturn("test-function"); + + extractor.onStart(attributes, Context.current(), mockRequest); + + verify(attributes).put(eq(AwsExperimentalAttributes.AWS_LAMBDA_NAME), eq("test-function")); + } + + @Test + void testLambdaArnExperimentalAttributes() { + GetFunctionResult lambdaResult = mock(GetFunctionResult.class); + FunctionConfiguration config = mock(FunctionConfiguration.class); + when(mockResponse.getAwsResponse()).thenReturn(lambdaResult); + 
when(lambdaResult.getConfiguration()).thenReturn(config); + when(config.getFunctionArn()).thenReturn("arn:aws:lambda:region:account:function:test"); + when(mockRequest.getServiceName()).thenReturn("AWSLambda"); + + extractor.onEnd(attributes, Context.current(), mockRequest, mockResponse, null); + + verify(attributes) + .put( + eq(AwsExperimentalAttributes.AWS_LAMBDA_ARN), + eq("arn:aws:lambda:region:account:function:test")); + } + + @Test + void testLambdaResourceIdExperimentalAttributes() { + PublishRequest originalRequest = mock(PublishRequest.class); + when(mockRequest.getServiceName()).thenReturn("AWSLambda"); + when(mockRequest.getOriginalRequest()).thenReturn(originalRequest); + + extractor.onStart(attributes, Context.current(), mockRequest); + // We can't verify the actual attribute setting since it depends on reflection + } + + @Test + void testTableArnExperimentalAttributes() { + PublishRequest originalRequest = mock(PublishRequest.class); + when(mockRequest.getServiceName()).thenReturn("AmazonDynamoDBv2"); + when(mockRequest.getOriginalRequest()).thenReturn(originalRequest); + + extractor.onStart(attributes, Context.current(), mockRequest); + // We can't verify the actual attribute setting since it depends on reflection + } + + @Test + void testBedrockRuntimeAttributes() { + PublishRequest originalRequest = mock(PublishRequest.class); + when(mockRequest.getServiceName()).thenReturn("AmazonBedrockRuntime"); + when(mockRequest.getOriginalRequest()).thenReturn(originalRequest); + + extractor.onStart(attributes, Context.current(), mockRequest); + // We can't verify the actual attribute setting since it depends on reflection and class name + } + + @Test + void testBedrockAgentAttributes() { + PublishRequest originalRequest = mock(PublishRequest.class); + when(mockRequest.getServiceName()).thenReturn("AWSBedrockAgent"); + when(mockRequest.getOriginalRequest()).thenReturn(originalRequest); + + extractor.onStart(attributes, Context.current(), mockRequest); + // 
We can't verify the actual attribute setting since it depends on reflection + } + + @Test + void testBedrockAgentRuntimeAttributes() { + PublishRequest originalRequest = mock(PublishRequest.class); + when(mockRequest.getServiceName()).thenReturn("AWSBedrockAgentRuntime"); + when(mockRequest.getOriginalRequest()).thenReturn(originalRequest); + + extractor.onStart(attributes, Context.current(), mockRequest); + // We can't verify the actual attribute setting since it depends on reflection + } + + @Test + void testBedrockGuardrailAttributes() { + PublishRequest originalRequest = mock(PublishRequest.class); + when(mockRequest.getServiceName()).thenReturn("AmazonBedrock"); + when(mockRequest.getOriginalRequest()).thenReturn(originalRequest); + + extractor.onStart(attributes, Context.current(), mockRequest); + // We can't verify the actual attribute setting since it depends on reflection + } +} diff --git a/instrumentation/aws-sdk/src/test/java/software/amazon/opentelemetry/javaagent/instrumentation/awssdk_v2_2/AwsSdkExperimentalAttributesInjectionTest.java b/instrumentation/aws-sdk/src/test/java/software/amazon/opentelemetry/javaagent/instrumentation/awssdk_v2_2/AwsSdkExperimentalAttributesInjectionTest.java new file mode 100644 index 0000000000..b5cc1a079c --- /dev/null +++ b/instrumentation/aws-sdk/src/test/java/software/amazon/opentelemetry/javaagent/instrumentation/awssdk_v2_2/AwsSdkExperimentalAttributesInjectionTest.java @@ -0,0 +1,274 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. 
See the License for the specific language governing + * permissions and limitations under the License. + */ + +package software.amazon.opentelemetry.javaagent.instrumentation.awssdk_v2_2; + +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import io.opentelemetry.api.trace.Span; +import java.util.Optional; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import software.amazon.awssdk.core.SdkBytes; +import software.amazon.awssdk.core.SdkRequest; +import software.amazon.awssdk.core.SdkResponse; + +/* + * NOTE: V2.2 attribute extraction uses direct field access via getValueForField() method. + * These tests can fully verify attribute extraction by mocking the field values and verifying + * the correct attributes are set on the span. This provides comprehensive coverage of the + * attribute extraction logic, supplementing the V2 contract tests. 
+ */ +public class AwsSdkExperimentalAttributesInjectionTest { + private FieldMapper fieldMapper; + private Span mockSpan; + private SdkRequest mockRequest; + + @BeforeEach + void setUp() { + fieldMapper = new FieldMapper(); + mockSpan = mock(Span.class); + mockRequest = mock(SdkRequest.class); + } + + @Test + void testS3ExperimentalAttributes() { + when(mockRequest.getValueForField("Bucket", Object.class)) + .thenReturn(Optional.of("test-bucket")); + + fieldMapper.mapToAttributes(mockRequest, AwsSdkRequest.S3Request, mockSpan); + + verify(mockSpan) + .setAttribute(eq(AwsExperimentalAttributes.AWS_BUCKET_NAME.getKey()), eq("test-bucket")); + } + + @Test + void testSqsExperimentalAttributes() { + String queueUrl = "https://sqs.us-east-1.amazonaws.com/123456789012/test-queue"; + when(mockRequest.getValueForField("QueueUrl", Object.class)).thenReturn(Optional.of(queueUrl)); + + fieldMapper.mapToAttributes(mockRequest, AwsSdkRequest.SqsRequest, mockSpan); + + verify(mockSpan) + .setAttribute(eq(AwsExperimentalAttributes.AWS_QUEUE_URL.getKey()), eq(queueUrl)); + } + + @Test + void testDynamoDbExperimentalAttributes() { + when(mockRequest.getValueForField("TableName", Object.class)) + .thenReturn(Optional.of("test-table")); + + fieldMapper.mapToAttributes(mockRequest, AwsSdkRequest.DynamoDbRequest, mockSpan); + + verify(mockSpan) + .setAttribute(eq(AwsExperimentalAttributes.AWS_TABLE_NAME.getKey()), eq("test-table")); + } + + @Test + void testSnsExperimentalAttributes() { + String topicArn = "arn:aws:sns:us-east-1:123456789012:test-topic"; + when(mockRequest.getValueForField("TopicArn", Object.class)).thenReturn(Optional.of(topicArn)); + + fieldMapper.mapToAttributes(mockRequest, AwsSdkRequest.SnsRequest, mockSpan); + + verify(mockSpan) + .setAttribute(eq(AwsExperimentalAttributes.AWS_SNS_TOPIC_ARN.getKey()), eq(topicArn)); + } + + @Test + void testKinesisExperimentalAttributes() { + when(mockRequest.getValueForField("StreamName", Object.class)) + 
.thenReturn(Optional.of("test-stream")); + when(mockRequest.getValueForField("StreamARN", Object.class)) + .thenReturn(Optional.of("arn:aws:kinesis:region:account:stream/test-stream")); + + fieldMapper.mapToAttributes(mockRequest, AwsSdkRequest.KinesisRequest, mockSpan); + + verify(mockSpan) + .setAttribute(eq(AwsExperimentalAttributes.AWS_STREAM_NAME.getKey()), eq("test-stream")); + verify(mockSpan) + .setAttribute( + eq(AwsExperimentalAttributes.AWS_STREAM_ARN.getKey()), + eq("arn:aws:kinesis:region:account:stream/test-stream")); + } + + @Test + void testStepFunctionExperimentalAttributes() { + when(mockRequest.getValueForField("stateMachineArn", Object.class)) + .thenReturn(Optional.of("arn:aws:states:region:account:stateMachine/test")); + when(mockRequest.getValueForField("activityArn", Object.class)) + .thenReturn(Optional.of("arn:aws:states:region:account:activity/test")); + + fieldMapper.mapToAttributes(mockRequest, AwsSdkRequest.SfnRequest, mockSpan); + + verify(mockSpan) + .setAttribute( + eq(AwsExperimentalAttributes.AWS_STATE_MACHINE_ARN.getKey()), + eq("arn:aws:states:region:account:stateMachine/test")); + verify(mockSpan) + .setAttribute( + eq(AwsExperimentalAttributes.AWS_STEP_FUNCTIONS_ACTIVITY_ARN.getKey()), + eq("arn:aws:states:region:account:activity/test")); + } + + @Test + void testAuthAccessKeyExperimentalAttribute() { + mockSpan.setAttribute( + AwsExperimentalAttributes.AWS_AUTH_ACCESS_KEY.getKey(), "AKIAIOSFODNN7EXAMPLE"); + + verify(mockSpan) + .setAttribute( + eq(AwsExperimentalAttributes.AWS_AUTH_ACCESS_KEY.getKey()), eq("AKIAIOSFODNN7EXAMPLE")); + } + + @Test + void testAuthRegionExperimentalAttribute() { + mockSpan.setAttribute(AwsExperimentalAttributes.AWS_AUTH_REGION.getKey(), "us-east-1"); + + verify(mockSpan) + .setAttribute(eq(AwsExperimentalAttributes.AWS_AUTH_REGION.getKey()), eq("us-east-1")); + } + + @Test + void testSecretsManagerExperimentalAttributes() { + SdkResponse mockResponse = mock(SdkResponse.class); + 
when(mockResponse.getValueForField("ARN", Object.class)) + .thenReturn(Optional.of("arn:aws:secretsmanager:region:account:secret:test")); + + fieldMapper.mapToAttributes(mockResponse, AwsSdkRequest.SecretsManagerRequest, mockSpan); + + verify(mockSpan) + .setAttribute( + eq(AwsExperimentalAttributes.AWS_SECRET_ARN.getKey()), + eq("arn:aws:secretsmanager:region:account:secret:test")); + } + + @Test + void testLambdaNameExperimentalAttribute() { + when(mockRequest.getValueForField("FunctionName", Object.class)) + .thenReturn(Optional.of("test-function")); + + fieldMapper.mapToAttributes(mockRequest, AwsSdkRequest.LambdaRequest, mockSpan); + + verify(mockSpan) + .setAttribute(eq(AwsExperimentalAttributes.AWS_LAMBDA_NAME.getKey()), eq("test-function")); + } + + @Test + void testLambdaResourceIdExperimentalAttribute() { + when(mockRequest.getValueForField("UUID", Object.class)) + .thenReturn(Optional.of("12345678-1234-1234-1234-123456789012")); + + fieldMapper.mapToAttributes(mockRequest, AwsSdkRequest.LambdaRequest, mockSpan); + + verify(mockSpan) + .setAttribute( + eq(AwsExperimentalAttributes.AWS_LAMBDA_RESOURCE_ID.getKey()), + eq("12345678-1234-1234-1234-123456789012")); + } + + @Test + void testLambdaArnExperimentalAttribute() { + mockSpan.setAttribute( + AwsExperimentalAttributes.AWS_LAMBDA_ARN.getKey(), + "arn:aws:lambda:us-east-1:123456789012:function:test-function"); + + verify(mockSpan) + .setAttribute( + eq(AwsExperimentalAttributes.AWS_LAMBDA_ARN.getKey()), + eq("arn:aws:lambda:us-east-1:123456789012:function:test-function")); + } + + @Test + void testTableArnExperimentalAttribute() { + mockSpan.setAttribute( + AwsExperimentalAttributes.AWS_TABLE_ARN.getKey(), + "arn:aws:dynamodb:us-east-1:123456789012:table/test-table"); + + verify(mockSpan) + .setAttribute( + eq(AwsExperimentalAttributes.AWS_TABLE_ARN.getKey()), + eq("arn:aws:dynamodb:us-east-1:123456789012:table/test-table")); + } + + @Test + void testBedrockExperimentalAttributes() { + String modelId = 
"anthropic.claude-v2"; + SdkBytes requestBody = SdkBytes.fromUtf8String("{\"max_tokens\": 100, \"temperature\": 0.7}"); + + when(mockRequest.getValueForField("modelId", Object.class)).thenReturn(Optional.of(modelId)); + when(mockRequest.getValueForField("body", Object.class)).thenReturn(Optional.of(requestBody)); + + fieldMapper.mapToAttributes(mockRequest, AwsSdkRequest.BedrockRuntimeRequest, mockSpan); + + verify(mockSpan).setAttribute(eq(AwsExperimentalAttributes.GEN_AI_MODEL.getKey()), eq(modelId)); + verify(mockSpan) + .setAttribute(eq(AwsExperimentalAttributes.GEN_AI_REQUEST_MAX_TOKENS.getKey()), eq("100")); + verify(mockSpan) + .setAttribute(eq(AwsExperimentalAttributes.GEN_AI_REQUEST_TEMPERATURE.getKey()), eq("0.7")); + } + + @Test + void testBedrockAgentExperimentalAttributes() { + when(mockRequest.getValueForField("agentId", Object.class)) + .thenReturn(Optional.of("test-agent")); + + fieldMapper.mapToAttributes(mockRequest, AwsSdkRequest.BedrockBedrockAgentRequest, mockSpan); + + verify(mockSpan) + .setAttribute(eq(AwsExperimentalAttributes.AWS_AGENT_ID.getKey()), eq("test-agent")); + } + + @Test + void testBedrockAgentRuntimeExperimentalAttributes() { + when(mockRequest.getValueForField("agentId", Object.class)) + .thenReturn(Optional.of("test-agent")); + when(mockRequest.getValueForField("knowledgeBaseId", Object.class)) + .thenReturn(Optional.of("test-kb")); + + fieldMapper.mapToAttributes(mockRequest, AwsSdkRequest.BedrockAgentRuntimeRequest, mockSpan); + + verify(mockSpan) + .setAttribute(eq(AwsExperimentalAttributes.AWS_AGENT_ID.getKey()), eq("test-agent")); + verify(mockSpan) + .setAttribute(eq(AwsExperimentalAttributes.AWS_KNOWLEDGE_BASE_ID.getKey()), eq("test-kb")); + } + + @Test + void testBedrockDataSourceExperimentalAttributes() { + when(mockRequest.getValueForField("dataSourceId", Object.class)) + .thenReturn(Optional.of("test-ds")); + + fieldMapper.mapToAttributes(mockRequest, AwsSdkRequest.BedrockGetDataSourceRequest, mockSpan); + + 
verify(mockSpan) + .setAttribute(eq(AwsExperimentalAttributes.AWS_DATA_SOURCE_ID.getKey()), eq("test-ds")); + } + + @Test + void testBedrockKnowledgeBaseExperimentalAttributes() { + when(mockRequest.getValueForField("knowledgeBaseId", Object.class)) + .thenReturn(Optional.of("test-kb")); + + fieldMapper.mapToAttributes( + mockRequest, AwsSdkRequest.BedrockGetKnowledgeBaseRequest, mockSpan); + + verify(mockSpan) + .setAttribute(eq(AwsExperimentalAttributes.AWS_KNOWLEDGE_BASE_ID.getKey()), eq("test-kb")); + } +} diff --git a/lambda-layer/build-layer.sh b/lambda-layer/build-layer.sh index 36350cd5b1..8c944191de 100755 --- a/lambda-layer/build-layer.sh +++ b/lambda-layer/build-layer.sh @@ -22,9 +22,6 @@ git clone https://github.com/open-telemetry/opentelemetry-java-instrumentation.g pushd opentelemetry-java-instrumentation git checkout v${version} -b tag-v${version} -# There is another patch in the .github/patches directory for other changes. We should apply them too for consistency. -patch -p1 < "$SOURCEDIR"/../.github/patches/opentelemetry-java-instrumentation.patch - # This patch is for Lambda related context propagation patch -p1 < "$SOURCEDIR"/patches/opentelemetry-java-instrumentation.patch diff --git a/lambda-layer/patches/aws-otel-java-instrumentation.patch b/lambda-layer/patches/aws-otel-java-instrumentation.patch index e7546c2107..6b1f5eb9d5 100644 --- a/lambda-layer/patches/aws-otel-java-instrumentation.patch +++ b/lambda-layer/patches/aws-otel-java-instrumentation.patch @@ -3,11 +3,11 @@ index 9493189..6090207 100644 --- a/dependencyManagement/build.gradle.kts +++ b/dependencyManagement/build.gradle.kts @@ -27,7 +27,7 @@ data class DependencySet(val group: String, val version: String, val modules: Li - val TEST_SNAPSHOTS = rootProject.findProperty("testUpstreamSnapshots") == "true" + val testSnapshots = rootProject.findProperty("testUpstreamSnapshots") == "true" // This is the version of the upstream instrumentation BOM --val otelVersion = "2.11.0-adot1" 
+-val otelVersion = "2.11.0" +val otelVersion = "2.11.0-adot-lambda1" val otelSnapshotVersion = "2.12.0" - val otelAlphaVersion = if (!TEST_SNAPSHOTS) "$otelVersion-alpha" else "$otelSnapshotVersion-alpha-SNAPSHOT" - val otelJavaAgentVersion = if (!TEST_SNAPSHOTS) otelVersion else "$otelSnapshotVersion-SNAPSHOT" + val otelAlphaVersion = if (!testSnapshots) "$otelVersion-alpha" else "$otelSnapshotVersion-alpha-SNAPSHOT" + val otelJavaAgentVersion = if (!testSnapshots) otelVersion else "$otelSnapshotVersion-SNAPSHOT" diff --git a/lambda-layer/patches/opentelemetry-java-instrumentation.patch b/lambda-layer/patches/opentelemetry-java-instrumentation.patch index cca35f0ed0..a4004e3330 100644 --- a/lambda-layer/patches/opentelemetry-java-instrumentation.patch +++ b/lambda-layer/patches/opentelemetry-java-instrumentation.patch @@ -310,8 +310,8 @@ index 7900c9a4d9..80383d7c22 100644 --- a/version.gradle.kts +++ b/version.gradle.kts @@ -1,5 +1,5 @@ --val stableVersion = "2.11.0-adot1" --val alphaVersion = "2.11.0-adot1-alpha" +-val stableVersion = "2.11.0" +-val alphaVersion = "2.11.0-alpha" +val stableVersion = "2.11.0-adot-lambda1" +val alphaVersion = "2.11.0-adot-lambda1-alpha" diff --git a/scripts/local_patch.sh b/scripts/local_patch.sh index d1c01c5d8b..079d4516b9 100755 --- a/scripts/local_patch.sh +++ b/scripts/local_patch.sh @@ -56,28 +56,4 @@ if [[ -f "$OTEL_JAVA_CONTRIB_PATCH" ]]; then rm -rf opentelemetry-java-contrib else echo "Skipping patching opentelemetry-java-contrib" -fi - - -# Patching opentelemetry-java-instrumentation -OTEL_JAVA_INSTRUMENTATION_PATCH=".github/patches/opentelemetry-java-instrumentation.patch" -if [[ -f "$OTEL_JAVA_INSTRUMENTATION_PATCH" ]]; then - echo "Patching opentelemetry-java-instrumentation" - git clone https://github.com/open-telemetry/opentelemetry-java-instrumentation.git - cd opentelemetry-java-instrumentation - - echo "Checking out tag ${OTEL_JAVA_INSTRUMENTATION_VERSION}" - git checkout ${OTEL_JAVA_INSTRUMENTATION_VERSION} -b 
tag-${OTEL_JAVA_INSTRUMENTATION_VERSION} - patch -p1 < "../${OTEL_JAVA_INSTRUMENTATION_PATCH}" - git commit -a -m "ADOT Patch release" - - echo "Building patched opentelemetry-java-instrumentation" - ./gradlew clean assemble - ./gradlew publishToMavenLocal - cd - - - echo "Cleaning up opentelemetry-java-instrumentation" - rm -rf opentelemetry-java-instrumentation -else - echo "Skipping patching opentelemetry-java-instrumentation" fi \ No newline at end of file From 4e45621818aecbe446860b957cc9b074d140aeff Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 14 Aug 2025 16:42:20 +0000 Subject: [PATCH 19/83] Bump org.testcontainers:postgresql from 1.19.3 to 1.21.3 (#1139) --- appsignals-tests/contract-tests/build.gradle.kts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/appsignals-tests/contract-tests/build.gradle.kts b/appsignals-tests/contract-tests/build.gradle.kts index ac15939a62..046c88d3f0 100644 --- a/appsignals-tests/contract-tests/build.gradle.kts +++ b/appsignals-tests/contract-tests/build.gradle.kts @@ -56,8 +56,8 @@ dependencies { testImplementation("software.amazon.awssdk:sts") testImplementation(kotlin("test")) implementation(project(":appsignals-tests:images:grpc:grpc-base")) - testImplementation("org.testcontainers:kafka:1.19.3") - testImplementation("org.testcontainers:postgresql:1.19.3") + testImplementation("org.testcontainers:kafka:1.21.3") + testImplementation("org.testcontainers:postgresql:1.21.3") testImplementation("org.testcontainers:mysql:1.19.8") testImplementation("com.mysql:mysql-connector-j:8.4.0") } From ce79a852adfc9dd4dd9bf60a497f98f56f7a06fd Mon Sep 17 00:00:00 2001 From: "Luke (GuangHui) Zhang" Date: Thu, 14 Aug 2025 11:08:50 -0700 Subject: [PATCH 20/83] fix(deps): update dependency otel/semconv to v1.29.0 (#1145) This PR upgrades these two upstream dependencies in ADOT Java from v1.28.0 to v1.29.0. 
io.opentelemetry.semconv:opentelemetry-semconv io.opentelemetry.semconv:opentelemetry-semconv-incubating Upstream OTel Java Agent v2.11 is using semconv v1.29.0, not v1.28.0. This PR keeps OTel and ADOT in sync automatically on semconv. ADOT now relies on "io.opentelemetry.instrumentation:opentelemetry-instrumentation-bom-alpha" to get the version of semconv, instead of explicitly declaring one. In March 2024, upstream stopped using semconv keys from package "io.opentelemetry.semconv.SemanticAttributes". Semconv 1.25.0 migration #10983 https://github.com/open-telemetry/opentelemetry-java-instrumentation/pull/10983 ADOT Java did not follow. It's still using keys from this package: https://github.com/aws-observability/aws-otel-java-instrumentation/blob/release/v2.11.x/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtil.java#L18 Unfortunately, in this Jan., this package was deleted from upstream. This is causing ADOT Java build break if we need upgrade upstream dependencies. This PR replaces these old keys in ADOT Java code base. Basically, it is doing the same update upstream had done in last March. The code change is safe. It has a limited scope that only updates the definitions of semconv keys. The text content of these keys remain untouched. Test: ./gradlew build Pass ./gradlew test. Pass Manual E2E test Pass Backward Compatibility: This change is backward compatible. It does not change any runtime behaviors. By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- awsagentprovider/build.gradle.kts | 6 +- .../AwsMetricAttributeGenerator.java | 68 ++++++++++--------- .../AwsResourceAttributeConfigurator.java | 2 +- .../providers/AwsSpanMetricsProcessor.java | 4 +- .../providers/AwsSpanProcessingUtil.java | 20 +++--- ...AttributePropagatingSpanProcessorTest.java | 6 +- .../AwsMetricAttributeGeneratorTest.java | 52 +++++++++++--- .../AwsMetricAttributesSpanExporterTest.java | 4 +- .../AwsSpanMetricsProcessorTest.java | 2 +- .../providers/AwsSpanProcessingUtilTest.java | 9 ++- 10 files changed, 106 insertions(+), 67 deletions(-) diff --git a/awsagentprovider/build.gradle.kts b/awsagentprovider/build.gradle.kts index 1abe269cc0..3aeb79f935 100644 --- a/awsagentprovider/build.gradle.kts +++ b/awsagentprovider/build.gradle.kts @@ -26,8 +26,10 @@ base { dependencies { compileOnly("io.opentelemetry.javaagent:opentelemetry-javaagent-extension-api") - compileOnly("io.opentelemetry.semconv:opentelemetry-semconv:1.28.0-alpha") - testImplementation("io.opentelemetry.semconv:opentelemetry-semconv:1.28.0-alpha") + compileOnly("io.opentelemetry.semconv:opentelemetry-semconv") + compileOnly("io.opentelemetry.semconv:opentelemetry-semconv-incubating") + testImplementation("io.opentelemetry.semconv:opentelemetry-semconv") + testImplementation("io.opentelemetry.semconv:opentelemetry-semconv-incubating") compileOnly("com.google.errorprone:error_prone_annotations:2.19.1") compileOnly("io.opentelemetry:opentelemetry-sdk-extension-autoconfigure-spi") compileOnly("org.slf4j:slf4j-api") diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGenerator.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGenerator.java index a1349f06b5..c96346dc88 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGenerator.java +++ 
b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGenerator.java @@ -15,36 +15,34 @@ package software.amazon.opentelemetry.javaagent.providers; -import static io.opentelemetry.semconv.SemanticAttributes.DB_CONNECTION_STRING; -import static io.opentelemetry.semconv.SemanticAttributes.DB_NAME; -import static io.opentelemetry.semconv.SemanticAttributes.DB_OPERATION; -import static io.opentelemetry.semconv.SemanticAttributes.DB_STATEMENT; -import static io.opentelemetry.semconv.SemanticAttributes.DB_SYSTEM; -import static io.opentelemetry.semconv.SemanticAttributes.DB_USER; -import static io.opentelemetry.semconv.SemanticAttributes.FAAS_INVOKED_NAME; -import static io.opentelemetry.semconv.SemanticAttributes.FAAS_TRIGGER; -import static io.opentelemetry.semconv.SemanticAttributes.GRAPHQL_OPERATION_TYPE; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_METHOD; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_REQUEST_METHOD; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_RESPONSE_STATUS_CODE; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_STATUS_CODE; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_URL; -import static io.opentelemetry.semconv.SemanticAttributes.MESSAGING_OPERATION; -import static io.opentelemetry.semconv.SemanticAttributes.MESSAGING_SYSTEM; -import static io.opentelemetry.semconv.SemanticAttributes.NETWORK_PEER_ADDRESS; -import static io.opentelemetry.semconv.SemanticAttributes.NETWORK_PEER_PORT; -import static io.opentelemetry.semconv.SemanticAttributes.NET_PEER_NAME; -import static io.opentelemetry.semconv.SemanticAttributes.NET_PEER_PORT; -import static io.opentelemetry.semconv.SemanticAttributes.NET_SOCK_PEER_ADDR; -import static io.opentelemetry.semconv.SemanticAttributes.NET_SOCK_PEER_PORT; -import static io.opentelemetry.semconv.SemanticAttributes.PEER_SERVICE; -import static 
io.opentelemetry.semconv.SemanticAttributes.RPC_METHOD; -import static io.opentelemetry.semconv.SemanticAttributes.RPC_SERVICE; -import static io.opentelemetry.semconv.SemanticAttributes.SERVER_ADDRESS; -import static io.opentelemetry.semconv.SemanticAttributes.SERVER_PORT; -import static io.opentelemetry.semconv.SemanticAttributes.SERVER_SOCKET_ADDRESS; -import static io.opentelemetry.semconv.SemanticAttributes.SERVER_SOCKET_PORT; -import static io.opentelemetry.semconv.SemanticAttributes.URL_FULL; +import static io.opentelemetry.semconv.HttpAttributes.HTTP_REQUEST_METHOD; +import static io.opentelemetry.semconv.HttpAttributes.HTTP_RESPONSE_STATUS_CODE; +import static io.opentelemetry.semconv.NetworkAttributes.NETWORK_PEER_ADDRESS; +import static io.opentelemetry.semconv.NetworkAttributes.NETWORK_PEER_PORT; +import static io.opentelemetry.semconv.ServerAttributes.SERVER_ADDRESS; +import static io.opentelemetry.semconv.ServerAttributes.SERVER_PORT; +import static io.opentelemetry.semconv.UrlAttributes.URL_FULL; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_CONNECTION_STRING; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_NAME; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_OPERATION; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_STATEMENT; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_SYSTEM; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_USER; +import static io.opentelemetry.semconv.incubating.FaasIncubatingAttributes.FAAS_INVOKED_NAME; +import static io.opentelemetry.semconv.incubating.FaasIncubatingAttributes.FAAS_TRIGGER; +import static io.opentelemetry.semconv.incubating.GraphqlIncubatingAttributes.GRAPHQL_OPERATION_TYPE; +import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_METHOD; +import static 
io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_STATUS_CODE; +import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_URL; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_SYSTEM; +import static io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_PEER_NAME; +import static io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_PEER_PORT; +import static io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_SOCK_PEER_ADDR; +import static io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_SOCK_PEER_PORT; +import static io.opentelemetry.semconv.incubating.PeerIncubatingAttributes.PEER_SERVICE; +import static io.opentelemetry.semconv.incubating.RpcIncubatingAttributes.RPC_METHOD; +import static io.opentelemetry.semconv.incubating.RpcIncubatingAttributes.RPC_SERVICE; import static software.amazon.opentelemetry.javaagent.providers.AwsApplicationSignalsCustomizerProvider.LAMBDA_APPLICATION_SIGNALS_REMOTE_ENVIRONMENT; import static software.amazon.opentelemetry.javaagent.providers.AwsAttributeKeys.AWS_AGENT_ID; import static software.amazon.opentelemetry.javaagent.providers.AwsAttributeKeys.AWS_AUTH_ACCESS_KEY; @@ -99,8 +97,6 @@ import io.opentelemetry.sdk.trace.data.EventData; import io.opentelemetry.sdk.trace.data.ExceptionEventData; import io.opentelemetry.sdk.trace.data.SpanData; -import io.opentelemetry.semconv.ResourceAttributes; -import io.opentelemetry.semconv.SemanticAttributes; import java.lang.reflect.Method; import java.net.MalformedURLException; import java.net.URI; @@ -126,6 +122,14 @@ * represent "outgoing" traffic, and {@link SpanKind#INTERNAL} spans are ignored. */ final class AwsMetricAttributeGenerator implements MetricAttributeGenerator { + // ToDo: These two keys were deleted by upstream. 
Code need to be updated to capture the same + // information by using new keys. + // https://github.com/open-telemetry/semantic-conventions-java/blob/release/v1.28.0/semconv/src/main/java/io/opentelemetry/semconv/SemanticAttributes.java#L3784-L3795 + static final AttributeKey SERVER_SOCKET_ADDRESS = + io.opentelemetry.api.common.AttributeKey.stringKey("server.socket.address"); + + static final AttributeKey SERVER_SOCKET_PORT = + io.opentelemetry.api.common.AttributeKey.longKey("server.socket.port"); private static final Logger logger = Logger.getLogger(AwsMetricAttributeGenerator.class.getName()); diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsResourceAttributeConfigurator.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsResourceAttributeConfigurator.java index d2decdc16c..01132a1919 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsResourceAttributeConfigurator.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsResourceAttributeConfigurator.java @@ -15,7 +15,7 @@ package software.amazon.opentelemetry.javaagent.providers; -import static io.opentelemetry.semconv.ResourceAttributes.SERVICE_NAME; +import static io.opentelemetry.semconv.ServiceAttributes.SERVICE_NAME; import static software.amazon.opentelemetry.javaagent.providers.AwsAttributeKeys.AWS_LOCAL_SERVICE; import static software.amazon.opentelemetry.javaagent.providers.AwsSpanProcessingUtil.UNKNOWN_SERVICE; diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessor.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessor.java index c2f133a48d..37436f5d3b 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessor.java +++ 
b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessor.java @@ -15,8 +15,8 @@ package software.amazon.opentelemetry.javaagent.providers; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_RESPONSE_STATUS_CODE; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_STATUS_CODE; +import static io.opentelemetry.semconv.HttpAttributes.HTTP_RESPONSE_STATUS_CODE; +import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_STATUS_CODE; import static software.amazon.opentelemetry.javaagent.providers.AwsAttributeKeys.AWS_REMOTE_SERVICE; import static software.amazon.opentelemetry.javaagent.providers.AwsSpanProcessingUtil.isKeyPresent; diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtil.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtil.java index 4211c24a7c..ebeb751acb 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtil.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtil.java @@ -15,16 +15,16 @@ package software.amazon.opentelemetry.javaagent.providers; -import static io.opentelemetry.semconv.SemanticAttributes.DB_OPERATION; -import static io.opentelemetry.semconv.SemanticAttributes.DB_STATEMENT; -import static io.opentelemetry.semconv.SemanticAttributes.DB_SYSTEM; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_METHOD; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_REQUEST_METHOD; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_TARGET; -import static io.opentelemetry.semconv.SemanticAttributes.MESSAGING_OPERATION; -import static io.opentelemetry.semconv.SemanticAttributes.MessagingOperationValues.PROCESS; -import static io.opentelemetry.semconv.SemanticAttributes.RPC_SYSTEM; -import 
static io.opentelemetry.semconv.SemanticAttributes.URL_PATH; +import static io.opentelemetry.semconv.UrlAttributes.URL_PATH; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_OPERATION; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_STATEMENT; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_SYSTEM; +import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_METHOD; +import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_REQUEST_METHOD; +import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_TARGET; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MessagingOperationTypeIncubatingValues.PROCESS; +import static io.opentelemetry.semconv.incubating.RpcIncubatingAttributes.RPC_SYSTEM; import static software.amazon.opentelemetry.javaagent.providers.AwsApplicationSignalsCustomizerProvider.AWS_LAMBDA_FUNCTION_NAME_CONFIG; import static software.amazon.opentelemetry.javaagent.providers.AwsApplicationSignalsCustomizerProvider.isLambdaEnvironment; import static software.amazon.opentelemetry.javaagent.providers.AwsAttributeKeys.AWS_LAMBDA_LOCAL_OPERATION_OVERRIDE; diff --git a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AttributePropagatingSpanProcessorTest.java b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AttributePropagatingSpanProcessorTest.java index 102f411013..443fca8b49 100644 --- a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AttributePropagatingSpanProcessorTest.java +++ b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AttributePropagatingSpanProcessorTest.java @@ -15,9 +15,9 @@ package software.amazon.opentelemetry.javaagent.providers; 
-import static io.opentelemetry.semconv.SemanticAttributes.MESSAGING_OPERATION; -import static io.opentelemetry.semconv.SemanticAttributes.MessagingOperationValues.PROCESS; -import static io.opentelemetry.semconv.SemanticAttributes.RPC_SYSTEM; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MessagingOperationTypeIncubatingValues.PROCESS; +import static io.opentelemetry.semconv.incubating.RpcIncubatingAttributes.RPC_SYSTEM; import static org.assertj.core.api.Assertions.assertThat; import io.opentelemetry.api.common.AttributeKey; diff --git a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGeneratorTest.java b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGeneratorTest.java index 8b362193df..a9f272483f 100644 --- a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGeneratorTest.java +++ b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGeneratorTest.java @@ -15,9 +15,39 @@ package software.amazon.opentelemetry.javaagent.providers; -import static io.opentelemetry.semconv.ResourceAttributes.SERVICE_NAME; -import static io.opentelemetry.semconv.SemanticAttributes.*; -import static io.opentelemetry.semconv.SemanticAttributes.MessagingOperationValues.PROCESS; +import static io.opentelemetry.semconv.HttpAttributes.HTTP_REQUEST_METHOD; +import static io.opentelemetry.semconv.HttpAttributes.HTTP_RESPONSE_STATUS_CODE; +import static io.opentelemetry.semconv.NetworkAttributes.NETWORK_PEER_ADDRESS; +import static io.opentelemetry.semconv.NetworkAttributes.NETWORK_PEER_PORT; +import static io.opentelemetry.semconv.ServerAttributes.SERVER_ADDRESS; +import static io.opentelemetry.semconv.ServerAttributes.SERVER_PORT; +import static 
io.opentelemetry.semconv.ServiceAttributes.SERVICE_NAME; +import static io.opentelemetry.semconv.UrlAttributes.URL_FULL; +import static io.opentelemetry.semconv.UrlAttributes.URL_PATH; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_CONNECTION_STRING; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_NAME; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_OPERATION; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_STATEMENT; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_SYSTEM; +import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_USER; +import static io.opentelemetry.semconv.incubating.FaasIncubatingAttributes.FAAS_INVOKED_NAME; +import static io.opentelemetry.semconv.incubating.FaasIncubatingAttributes.FAAS_INVOKED_PROVIDER; +import static io.opentelemetry.semconv.incubating.FaasIncubatingAttributes.FAAS_TRIGGER; +import static io.opentelemetry.semconv.incubating.GraphqlIncubatingAttributes.GRAPHQL_OPERATION_TYPE; +import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_METHOD; +import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_TARGET; +import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_URL; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_SYSTEM; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MessagingOperationTypeIncubatingValues.PROCESS; +import static io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_PEER_NAME; +import static io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_PEER_PORT; +import static io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_SOCK_PEER_ADDR; +import static 
io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_SOCK_PEER_PORT; +import static io.opentelemetry.semconv.incubating.PeerIncubatingAttributes.PEER_SERVICE; +import static io.opentelemetry.semconv.incubating.RpcIncubatingAttributes.RPC_METHOD; +import static io.opentelemetry.semconv.incubating.RpcIncubatingAttributes.RPC_SERVICE; +import static io.opentelemetry.semconv.incubating.RpcIncubatingAttributes.RPC_SYSTEM; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -1237,28 +1267,28 @@ public void testDBClientSpanWithRemoteResourceAttributes() { // Validate behaviour of DB_NAME, SERVER_SOCKET_ADDRESS and SERVER_SOCKET_PORT exist, then // remove it. mockAttribute(DB_NAME, "db_name"); - mockAttribute(SERVER_SOCKET_ADDRESS, "abc.com"); - mockAttribute(SERVER_SOCKET_PORT, 3306L); + mockAttribute(AwsMetricAttributeGenerator.SERVER_SOCKET_ADDRESS, "abc.com"); + mockAttribute(AwsMetricAttributeGenerator.SERVER_SOCKET_PORT, 3306L); validateRemoteResourceAttributes("DB::Connection", "db_name|abc.com|3306"); mockAttribute(DB_NAME, null); - mockAttribute(SERVER_SOCKET_ADDRESS, null); - mockAttribute(SERVER_SOCKET_PORT, null); + mockAttribute(AwsMetricAttributeGenerator.SERVER_SOCKET_ADDRESS, null); + mockAttribute(AwsMetricAttributeGenerator.SERVER_SOCKET_PORT, null); // Validate behaviour of DB_NAME, SERVER_SOCKET_ADDRESS exist, then remove it. mockAttribute(DB_NAME, "db_name"); - mockAttribute(SERVER_SOCKET_ADDRESS, "abc.com"); + mockAttribute(AwsMetricAttributeGenerator.SERVER_SOCKET_ADDRESS, "abc.com"); validateRemoteResourceAttributes("DB::Connection", "db_name|abc.com"); mockAttribute(DB_NAME, null); - mockAttribute(SERVER_SOCKET_ADDRESS, null); + mockAttribute(AwsMetricAttributeGenerator.SERVER_SOCKET_ADDRESS, null); // Validate behaviour of SERVER_SOCKET_PORT exist, then remove it. 
- mockAttribute(SERVER_SOCKET_PORT, 3306L); + mockAttribute(AwsMetricAttributeGenerator.SERVER_SOCKET_PORT, 3306L); when(spanDataMock.getKind()).thenReturn(SpanKind.CLIENT); actualAttributes = GENERATOR.generateMetricAttributeMapFromSpan(spanDataMock, resource).get(DEPENDENCY_METRIC); assertThat(actualAttributes.get(AWS_REMOTE_RESOURCE_TYPE)).isNull(); assertThat(actualAttributes.get(AWS_REMOTE_RESOURCE_IDENTIFIER)).isNull(); - mockAttribute(SERVER_SOCKET_PORT, null); + mockAttribute(AwsMetricAttributeGenerator.SERVER_SOCKET_PORT, null); // Validate behaviour of only DB_NAME exist, then remove it. mockAttribute(DB_NAME, "db_name"); diff --git a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributesSpanExporterTest.java b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributesSpanExporterTest.java index 7f7b340fef..f2c0a9d01b 100644 --- a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributesSpanExporterTest.java +++ b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributesSpanExporterTest.java @@ -15,8 +15,8 @@ package software.amazon.opentelemetry.javaagent.providers; -import static io.opentelemetry.semconv.SemanticAttributes.MESSAGING_OPERATION; -import static io.opentelemetry.semconv.SemanticAttributes.MessagingOperationValues.PROCESS; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MessagingOperationTypeIncubatingValues.PROCESS; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; diff --git a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessorTest.java 
b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessorTest.java index 65bba3a513..28a628f526 100644 --- a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessorTest.java +++ b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessorTest.java @@ -15,7 +15,7 @@ package software.amazon.opentelemetry.javaagent.providers; -import static io.opentelemetry.semconv.SemanticAttributes.HTTP_RESPONSE_STATUS_CODE; +import static io.opentelemetry.semconv.HttpAttributes.HTTP_RESPONSE_STATUS_CODE; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; diff --git a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtilTest.java b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtilTest.java index 9318a4c4ca..798f3f060a 100644 --- a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtilTest.java +++ b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtilTest.java @@ -15,9 +15,12 @@ package software.amazon.opentelemetry.javaagent.providers; -import static io.opentelemetry.semconv.SemanticAttributes.*; -import static io.opentelemetry.semconv.SemanticAttributes.MessagingOperationValues.PROCESS; -import static io.opentelemetry.semconv.SemanticAttributes.MessagingOperationValues.RECEIVE; +import static io.opentelemetry.semconv.HttpAttributes.HTTP_REQUEST_METHOD; +import static io.opentelemetry.semconv.UrlAttributes.URL_PATH; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MessagingOperationTypeIncubatingValues.PROCESS; 
+import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MessagingOperationTypeIncubatingValues.RECEIVE; +import static io.opentelemetry.semconv.incubating.RpcIncubatingAttributes.RPC_SYSTEM; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Answers.CALLS_REAL_METHODS; import static org.mockito.Mockito.mock; From df7bb311901cb64843c24ebaa5a8c976d82a9139 Mon Sep 17 00:00:00 2001 From: Steve Liu Date: Thu, 14 Aug 2025 15:26:57 -0700 Subject: [PATCH 21/83] Add Netty BOM (#1148) *Description of changes:* Builds are failing image scanning for `CVE-2025-55163` which recently was added as a vulnerability. https://github.com/advisories/GHSA-prj3-ccx8-p6x4 Should revert this once we upgrade our aws-sdk dependency to version that has this PR added: https://github.com/aws/aws-sdk-java-v2/pull/6344 By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --------- Co-authored-by: Thomas Pierce --- dependencyManagement/build.gradle.kts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/dependencyManagement/build.gradle.kts b/dependencyManagement/build.gradle.kts index 7bb24e3543..11a6441070 100644 --- a/dependencyManagement/build.gradle.kts +++ b/dependencyManagement/build.gradle.kts @@ -40,6 +40,9 @@ val dependencyBoms = listOf( "com.google.protobuf:protobuf-bom:3.25.1", "com.linecorp.armeria:armeria-bom:1.26.4", "io.grpc:grpc-bom:1.59.1", + // netty-bom is a fix for CVE-2025-55163 (https://github.com/advisories/GHSA-prj3-ccx8-p6x4). + // Remove once https://github.com/aws/aws-sdk-java-v2/pull/6344 is released. 
+ "io.netty:netty-bom:4.1.124.Final", "io.opentelemetry.instrumentation:opentelemetry-instrumentation-bom-alpha:$otelAlphaVersion", "org.apache.logging.log4j:log4j-bom:2.21.1", "org.junit:junit-bom:5.10.1", From d345cda532568a0f51feb139a2be56d279a778d3 Mon Sep 17 00:00:00 2001 From: Steve Liu Date: Fri, 15 Aug 2025 11:34:52 -0700 Subject: [PATCH 22/83] Update image scan to point to 2.11.3 release (#1151) *Description of changes:* Update owasp.yml to scan 2.11.3 release. By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- .github/workflows/owasp.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/owasp.yml b/.github/workflows/owasp.yml index f0297648c1..a505a4f865 100644 --- a/.github/workflows/owasp.yml +++ b/.github/workflows/owasp.yml @@ -112,7 +112,7 @@ jobs: id: high_scan_v2 uses: ./.github/actions/image_scan with: - image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.2" + image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.3" severity: 'CRITICAL,HIGH' logout: 'false' @@ -121,7 +121,7 @@ jobs: id: low_scan_v2 uses: ./.github/actions/image_scan with: - image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.2" + image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.3" severity: 'MEDIUM,LOW,UNKNOWN' logout: 'false' From 492f40620f79d78d6ee43aedf1b19baff587bb0a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Aug 2025 23:25:36 +0000 Subject: [PATCH 23/83] Bump tempfile from 3.9.0 to 3.20.0 in /tools/cp-utility (#1134) --- tools/cp-utility/Cargo.lock | 49 +++++++++++++++---------------------- tools/cp-utility/Cargo.toml | 2 +- 2 files changed, 21 insertions(+), 30 deletions(-) diff --git a/tools/cp-utility/Cargo.lock b/tools/cp-utility/Cargo.lock index 9e9036e893..4934060ffe 100644 --- 
a/tools/cp-utility/Cargo.lock +++ b/tools/cp-utility/Cargo.lock @@ -2,12 +2,6 @@ # It is not intended for manual editing. version = 4 -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - [[package]] name = "bitflags" version = "2.4.1" @@ -30,9 +24,9 @@ dependencies = [ [[package]] name = "errno" -version = "0.3.8" +version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad" dependencies = [ "libc", "windows-sys", @@ -40,9 +34,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.0.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "getrandom" @@ -64,9 +58,15 @@ checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6" [[package]] name = "linux-raw-sys" -version = "0.4.12" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" + +[[package]] +name = "once_cell" +version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "ppv-lite86" @@ -128,22 +128,13 @@ dependencies = [ "getrandom", ] -[[package]] -name = "redox_syscall" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" -dependencies = [ 
- "bitflags 1.3.2", -] - [[package]] name = "rustix" -version = "0.38.28" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316" +checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8" dependencies = [ - "bitflags 2.4.1", + "bitflags", "errno", "libc", "linux-raw-sys", @@ -163,13 +154,13 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.9.0" +version = "3.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01ce4141aa927a6d1bd34a041795abd0db1cccba5d5f24b009f694bdf3a1f3fa" +checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" dependencies = [ - "cfg-if", "fastrand", - "redox_syscall", + "getrandom", + "once_cell", "rustix", "windows-sys", ] @@ -271,7 +262,7 @@ version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" dependencies = [ - "bitflags 2.4.1", + "bitflags", ] [[package]] diff --git a/tools/cp-utility/Cargo.toml b/tools/cp-utility/Cargo.toml index fdd8bff094..976b15dd44 100644 --- a/tools/cp-utility/Cargo.toml +++ b/tools/cp-utility/Cargo.toml @@ -10,7 +10,7 @@ edition = "2021" [dev-dependencies] # dependencies only used during tests -tempfile = "3.9.0" +tempfile = "3.20.0" uuid = { version = "1.16.0", features = ["v4", "fast-rng"] } [profile.release] From bb4abb4cbb915dafd68381678a61fc51a55d2b3b Mon Sep 17 00:00:00 2001 From: "Luke (GuangHui) Zhang" Date: Fri, 15 Aug 2025 18:16:43 -0700 Subject: [PATCH 24/83] [semconv]: Add functions to support migration of deprecated semconv keys (#1150) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR adds a utility method to help migrate deprecated semconv keys. It first checks the new key; if the new key is not available, it falls back to the legacy deprecated key. 
This PR also handles the following deprecated keys: MESSAGING_OPERATION SERVER_SOCKET_ADDRESS SERVER_SOCKET_PORT Tests: ./gradlew build test — Pass ./gradlew appsignals-tests:contract-tests:contractTests — Pass Manual E2E with Spring Boot sample app: Compared raw span data with and without this change — Pass By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- .../AwsMetricAttributeGenerator.java | 44 +++++---- .../providers/AwsSpanProcessingUtil.java | 24 ++++- .../AwsMetricAttributeGeneratorTest.java | 53 ++++++++--- .../providers/AwsSpanProcessingUtilTest.java | 90 +++++++++++++++++++ 4 files changed, 182 insertions(+), 29 deletions(-) diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGenerator.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGenerator.java index c96346dc88..2d913e0269 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGenerator.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGenerator.java @@ -22,6 +22,12 @@ import static io.opentelemetry.semconv.ServerAttributes.SERVER_ADDRESS; import static io.opentelemetry.semconv.ServerAttributes.SERVER_PORT; import static io.opentelemetry.semconv.UrlAttributes.URL_FULL; +// These DB keys have been deprecated: +// https://github.com/open-telemetry/semantic-conventions-java/blob/release/v1.34.0/semconv-incubating/src/main/java/io/opentelemetry/semconv/incubating/DbIncubatingAttributes.java#L322-L327 +// They have been replaced with new keys: +// https://github.com/open-telemetry/semantic-conventions-java/blob/release/v1.34.0/semconv/src/main/java/io/opentelemetry/semconv/DbAttributes.java#L77 +// TODO: Supporting new keys. Cannot do this now as new keys are not available in OTel Agent 2.11. 
+// TODO: Delete deprecated keys once they no longer exist in binding version of the upstream code. import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_CONNECTION_STRING; import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_NAME; import static io.opentelemetry.semconv.incubating.DbIncubatingAttributes.DB_OPERATION; @@ -34,7 +40,10 @@ import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_METHOD; import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_STATUS_CODE; import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_URL; +// https://github.com/open-telemetry/semantic-conventions-java/blob/release/v1.34.0/semconv-incubating/src/main/java/io/opentelemetry/semconv/incubating/MessagingIncubatingAttributes.java#L236-L242 +// Deprecated, use {@code messaging.operation.type} instead. import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION_TYPE; import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_SYSTEM; import static io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_PEER_NAME; import static io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_PEER_PORT; @@ -87,6 +96,7 @@ import static software.amazon.opentelemetry.javaagent.providers.AwsSpanProcessingUtil.isAwsSDKSpan; import static software.amazon.opentelemetry.javaagent.providers.AwsSpanProcessingUtil.isDBSpan; import static software.amazon.opentelemetry.javaagent.providers.AwsSpanProcessingUtil.isKeyPresent; +import static software.amazon.opentelemetry.javaagent.providers.AwsSpanProcessingUtil.isKeyPresentWithFallback; import com.amazonaws.arn.Arn; import io.opentelemetry.api.common.AttributeKey; @@ -122,15 +132,6 @@ * represent "outgoing" traffic, and {@link SpanKind#INTERNAL} spans are 
ignored. */ final class AwsMetricAttributeGenerator implements MetricAttributeGenerator { - // ToDo: These two keys were deleted by upstream. Code need to be updated to capture the same - // information by using new keys. - // https://github.com/open-telemetry/semantic-conventions-java/blob/release/v1.28.0/semconv/src/main/java/io/opentelemetry/semconv/SemanticAttributes.java#L3784-L3795 - static final AttributeKey SERVER_SOCKET_ADDRESS = - io.opentelemetry.api.common.AttributeKey.stringKey("server.socket.address"); - - static final AttributeKey SERVER_SOCKET_PORT = - io.opentelemetry.api.common.AttributeKey.longKey("server.socket.port"); - private static final Logger logger = Logger.getLogger(AwsMetricAttributeGenerator.class.getName()); @@ -293,9 +294,11 @@ private static void setRemoteServiceAndOperation(SpanData span, AttributesBuilde } else if (isKeyPresent(span, FAAS_INVOKED_NAME) || isKeyPresent(span, FAAS_TRIGGER)) { remoteService = getRemoteService(span, FAAS_INVOKED_NAME); remoteOperation = getRemoteOperation(span, FAAS_TRIGGER); - } else if (isKeyPresent(span, MESSAGING_SYSTEM) || isKeyPresent(span, MESSAGING_OPERATION)) { + } else if (isKeyPresent(span, MESSAGING_SYSTEM) + || isKeyPresentWithFallback(span, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION)) { remoteService = getRemoteService(span, MESSAGING_SYSTEM); - remoteOperation = getRemoteOperation(span, MESSAGING_OPERATION); + remoteOperation = + getRemoteOperationWithFallback(span, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION); } else if (isKeyPresent(span, GRAPHQL_OPERATION_TYPE)) { remoteService = GRAPHQL; remoteOperation = getRemoteOperation(span, GRAPHQL_OPERATION_TYPE); @@ -772,7 +775,7 @@ private static Optional getSnsResourceNameFromArn(Optional strin * {address} attribute is retrieved in priority order: * - {@link SemanticAttributes#SERVER_ADDRESS}, * - {@link SemanticAttributes#NET_PEER_NAME}, - * - {@link SemanticAttributes#SERVER_SOCKET_ADDRESS} + * - {@link 
SemanticAttributes#NETWORK_PEER_ADDRESS} * - {@link SemanticAttributes#DB_CONNECTION_STRING}-Hostname * * @@ -780,7 +783,7 @@ private static Optional getSnsResourceNameFromArn(Optional strin * {port} attribute is retrieved in priority order: * - {@link SemanticAttributes#SERVER_PORT}, * - {@link SemanticAttributes#NET_PEER_PORT}, - * - {@link SemanticAttributes#SERVER_SOCKET_PORT} + * - {@link SemanticAttributes#NETWORK_PEER_PORT} * - {@link SemanticAttributes#DB_CONNECTION_STRING}-Port * * @@ -799,9 +802,9 @@ private static Optional getDbConnection(SpanData span) { String networkPeerAddress = span.getAttributes().get(NET_PEER_NAME); Long networkPeerPort = span.getAttributes().get(NET_PEER_PORT); dbConnection = buildDbConnection(networkPeerAddress, networkPeerPort); - } else if (isKeyPresent(span, SERVER_SOCKET_ADDRESS)) { - String serverSocketAddress = span.getAttributes().get(SERVER_SOCKET_ADDRESS); - Long serverSocketPort = span.getAttributes().get(SERVER_SOCKET_PORT); + } else if (isKeyPresent(span, NETWORK_PEER_ADDRESS)) { + String serverSocketAddress = span.getAttributes().get(NETWORK_PEER_ADDRESS); + Long serverSocketPort = span.getAttributes().get(NETWORK_PEER_PORT); dbConnection = buildDbConnection(serverSocketAddress, serverSocketPort); } else if (isKeyPresent(span, DB_CONNECTION_STRING)) { String connectionString = span.getAttributes().get(DB_CONNECTION_STRING); @@ -954,6 +957,15 @@ private static String getRemoteOperation(SpanData span, AttributeKey rem return remoteOperation; } + static String getRemoteOperationWithFallback( + SpanData span, AttributeKey remoteOpKey, AttributeKey remoteOpFallbackKey) { + String remoteOp = span.getAttributes().get(remoteOpKey); + if (remoteOp == null) { + return getRemoteOperation(span, remoteOpFallbackKey); + } + return remoteOp; + } + /** * If no db.operation attribute provided in the span, we use db.statement to compute a valid * remote operation in a best-effort manner. 
To do this, we take the first substring of the diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtil.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtil.java index ebeb751acb..1627de190c 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtil.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtil.java @@ -23,6 +23,7 @@ import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_REQUEST_METHOD; import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_TARGET; import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION_TYPE; import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MessagingOperationTypeIncubatingValues.PROCESS; import static io.opentelemetry.semconv.incubating.RpcIncubatingAttributes.RPC_SYSTEM; import static software.amazon.opentelemetry.javaagent.providers.AwsApplicationSignalsCustomizerProvider.AWS_LAMBDA_FUNCTION_NAME_CONFIG; @@ -153,6 +154,23 @@ static boolean isKeyPresent(SpanData span, AttributeKey key) { return span.getAttributes().get(key) != null; } + static boolean isKeyPresentWithFallback( + SpanData span, AttributeKey key, AttributeKey fallbackKey) { + if (span.getAttributes().get(key) != null) { + return true; + } + return isKeyPresent(span, fallbackKey); + } + + static T getKeyValueWithFallback( + SpanData span, AttributeKey key, AttributeKey fallbackKey) { + T value = span.getAttributes().get(key); + if (value != null) { + return value; + } + return span.getAttributes().get(fallbackKey); + } + static boolean isAwsSDKSpan(SpanData span) { // 
https://opentelemetry.io/docs/specs/otel/trace/semantic_conventions/instrumentation/aws-sdk/#common-attributes return "aws-api".equals(span.getAttributes().get(RPC_SYSTEM)); @@ -170,7 +188,8 @@ static boolean shouldGenerateDependencyMetricAttributes(SpanData span) { } static boolean isConsumerProcessSpan(SpanData spanData) { - String messagingOperation = spanData.getAttributes().get(MESSAGING_OPERATION); + String messagingOperation = + getKeyValueWithFallback(spanData, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION); return SpanKind.CONSUMER.equals(spanData.getKind()) && PROCESS.equals(messagingOperation); } @@ -192,7 +211,8 @@ static boolean isLocalRoot(SpanData spanData) { private static boolean isSqsReceiveMessageConsumerSpan(SpanData spanData) { String spanName = spanData.getName(); SpanKind spanKind = spanData.getKind(); - String messagingOperation = spanData.getAttributes().get(MESSAGING_OPERATION); + String messagingOperation = + getKeyValueWithFallback(spanData, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION); InstrumentationScopeInfo instrumentationScopeInfo = spanData.getInstrumentationScopeInfo(); return SQS_RECEIVE_MESSAGE_SPAN_NAME.equalsIgnoreCase(spanName) diff --git a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGeneratorTest.java b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGeneratorTest.java index a9f272483f..81667f6313 100644 --- a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGeneratorTest.java +++ b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGeneratorTest.java @@ -38,6 +38,7 @@ import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_TARGET; import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_URL; import static 
io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION_TYPE; import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_SYSTEM; import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MessagingOperationTypeIncubatingValues.PROCESS; import static io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_PEER_NAME; @@ -1264,31 +1265,31 @@ public void testDBClientSpanWithRemoteResourceAttributes() { assertThat(actualAttributes.get(AWS_REMOTE_RESOURCE_IDENTIFIER)).isNull(); mockAttribute(NET_PEER_PORT, null); - // Validate behaviour of DB_NAME, SERVER_SOCKET_ADDRESS and SERVER_SOCKET_PORT exist, then + // Validate behaviour of DB_NAME, NETWORK_PEER_ADDRESS and NETWORK_PEER_PORT exist, then // remove it. mockAttribute(DB_NAME, "db_name"); - mockAttribute(AwsMetricAttributeGenerator.SERVER_SOCKET_ADDRESS, "abc.com"); - mockAttribute(AwsMetricAttributeGenerator.SERVER_SOCKET_PORT, 3306L); + mockAttribute(NETWORK_PEER_ADDRESS, "abc.com"); + mockAttribute(NETWORK_PEER_PORT, 3306L); validateRemoteResourceAttributes("DB::Connection", "db_name|abc.com|3306"); mockAttribute(DB_NAME, null); - mockAttribute(AwsMetricAttributeGenerator.SERVER_SOCKET_ADDRESS, null); - mockAttribute(AwsMetricAttributeGenerator.SERVER_SOCKET_PORT, null); + mockAttribute(NETWORK_PEER_ADDRESS, null); + mockAttribute(NETWORK_PEER_PORT, null); - // Validate behaviour of DB_NAME, SERVER_SOCKET_ADDRESS exist, then remove it. + // Validate behaviour of DB_NAME, NETWORK_PEER_ADDRESS exist, then remove it. 
mockAttribute(DB_NAME, "db_name"); - mockAttribute(AwsMetricAttributeGenerator.SERVER_SOCKET_ADDRESS, "abc.com"); + mockAttribute(NETWORK_PEER_ADDRESS, "abc.com"); validateRemoteResourceAttributes("DB::Connection", "db_name|abc.com"); mockAttribute(DB_NAME, null); - mockAttribute(AwsMetricAttributeGenerator.SERVER_SOCKET_ADDRESS, null); + mockAttribute(NETWORK_PEER_ADDRESS, null); - // Validate behaviour of SERVER_SOCKET_PORT exist, then remove it. - mockAttribute(AwsMetricAttributeGenerator.SERVER_SOCKET_PORT, 3306L); + // Validate behaviour of NETWORK_PEER_PORT exist, then remove it. + mockAttribute(NETWORK_PEER_PORT, 3306L); when(spanDataMock.getKind()).thenReturn(SpanKind.CLIENT); actualAttributes = GENERATOR.generateMetricAttributeMapFromSpan(spanDataMock, resource).get(DEPENDENCY_METRIC); assertThat(actualAttributes.get(AWS_REMOTE_RESOURCE_TYPE)).isNull(); assertThat(actualAttributes.get(AWS_REMOTE_RESOURCE_IDENTIFIER)).isNull(); - mockAttribute(AwsMetricAttributeGenerator.SERVER_SOCKET_PORT, null); + mockAttribute(NETWORK_PEER_PORT, null); // Validate behaviour of only DB_NAME exist, then remove it. 
mockAttribute(DB_NAME, "db_name"); @@ -1614,6 +1615,36 @@ public void testDbUserPresentAndIsDbSpanFalse() { assertThat(actualAttributes.get(AWS_REMOTE_DB_USER)).isNull(); } + @Test + public void testGetRemoteOperationWithFallback_NewKeyPresent() { + mockAttribute(MESSAGING_OPERATION_TYPE, "send"); + mockAttribute(MESSAGING_OPERATION, "publish"); + String result = + AwsMetricAttributeGenerator.getRemoteOperationWithFallback( + spanDataMock, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION); + + assertThat(result).isEqualTo("send"); + } + + @Test + public void testGetRemoteOperationWithFallback_DeprecatedKeyPresent() { + mockAttribute(MESSAGING_OPERATION, "publish"); + String result = + AwsMetricAttributeGenerator.getRemoteOperationWithFallback( + spanDataMock, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION); + + assertThat(result).isEqualTo("publish"); + } + + @Test + public void testGetRemoteOperationWithFallback_BothKeysAbsent() { + String result = + AwsMetricAttributeGenerator.getRemoteOperationWithFallback( + spanDataMock, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION); + + assertThat(result).isEqualTo(UNKNOWN_REMOTE_OPERATION); + } + @Test public void testNormalizeRemoteServiceName_NoNormalization() { String serviceName = "non aws service"; diff --git a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtilTest.java b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtilTest.java index 798f3f060a..d4675674dc 100644 --- a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtilTest.java +++ b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtilTest.java @@ -18,6 +18,7 @@ import static io.opentelemetry.semconv.HttpAttributes.HTTP_REQUEST_METHOD; import static io.opentelemetry.semconv.UrlAttributes.URL_PATH; import static 
io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION; +import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MESSAGING_OPERATION_TYPE; import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MessagingOperationTypeIncubatingValues.PROCESS; import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MessagingOperationTypeIncubatingValues.RECEIVE; import static io.opentelemetry.semconv.incubating.RpcIncubatingAttributes.RPC_SYSTEM; @@ -365,6 +366,14 @@ public void testIsConsumerProcessSpanFalse() { assertThat(AwsSpanProcessingUtil.isConsumerProcessSpan(spanDataMock)).isFalse(); } + @Test + public void testIsConsumerProcessSpanFalse_with_MESSAGING_OPERATION_TYPE() { + when(attributesMock.get(MESSAGING_OPERATION_TYPE)).thenReturn(RECEIVE); + when(attributesMock.get(MESSAGING_OPERATION)).thenReturn(PROCESS); + when(spanDataMock.getKind()).thenReturn(SpanKind.CONSUMER); + assertThat(AwsSpanProcessingUtil.isConsumerProcessSpan(spanDataMock)).isFalse(); + } + @Test public void testIsConsumerProcessSpanTrue() { when(attributesMock.get(MESSAGING_OPERATION)).thenReturn(PROCESS); @@ -372,6 +381,14 @@ public void testIsConsumerProcessSpanTrue() { assertThat(AwsSpanProcessingUtil.isConsumerProcessSpan(spanDataMock)).isTrue(); } + @Test + public void testIsConsumerProcessSpanTrue_with_MESSAGING_OPERATION_TYPE() { + when(attributesMock.get(MESSAGING_OPERATION_TYPE)).thenReturn(PROCESS); + when(attributesMock.get(MESSAGING_OPERATION)).thenReturn(RECEIVE); + when(spanDataMock.getKind()).thenReturn(SpanKind.CONSUMER); + assertThat(AwsSpanProcessingUtil.isConsumerProcessSpan(spanDataMock)).isTrue(); + } + // check that AWS SDK v1 SQS ReceiveMessage consumer spans metrics are suppressed @Test public void testNoMetricAttributesForSqsConsumerSpanAwsSdkV1() { @@ -436,6 +453,26 @@ public void testNoMetricAttributesForAwsSdkSqsConsumerProcessSpan() { .isTrue(); } + @Test + public void + 
testNoMetricAttributesForAwsSdkSqsConsumerProcessSpan_with_MESSAGING_OPERATION_TYPE() { + InstrumentationScopeInfo instrumentationScopeInfo = mock(InstrumentationScopeInfo.class); + when(instrumentationScopeInfo.getName()).thenReturn("io.opentelemetry.aws-sdk-2.2"); + when(spanDataMock.getInstrumentationScopeInfo()).thenReturn(instrumentationScopeInfo); + when(spanDataMock.getKind()).thenReturn(SpanKind.CONSUMER); + when(spanDataMock.getName()).thenReturn("Sqs.ReceiveMessage"); + when(attributesMock.get(MESSAGING_OPERATION_TYPE)).thenReturn(PROCESS); + + assertThat(AwsSpanProcessingUtil.shouldGenerateServiceMetricAttributes(spanDataMock)).isFalse(); + assertThat(AwsSpanProcessingUtil.shouldGenerateDependencyMetricAttributes(spanDataMock)) + .isFalse(); + + when(attributesMock.get(MESSAGING_OPERATION_TYPE)).thenReturn(RECEIVE); + assertThat(AwsSpanProcessingUtil.shouldGenerateServiceMetricAttributes(spanDataMock)).isTrue(); + assertThat(AwsSpanProcessingUtil.shouldGenerateDependencyMetricAttributes(spanDataMock)) + .isTrue(); + } + @Test public void testSqlDialectKeywordsOrder() { List keywords = getDialectKeywords(); @@ -454,4 +491,57 @@ public void testSqlDialectKeywordsMaxLength() { assertThat(MAX_KEYWORD_LENGTH >= keyword.length()); } } + + @Test + public void testIsKeyPresentWithFallback_NewKeyPresent() { + when(attributesMock.get(MESSAGING_OPERATION_TYPE)).thenReturn("publish"); + assertThat( + AwsSpanProcessingUtil.isKeyPresentWithFallback( + spanDataMock, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION)) + .isTrue(); + } + + @Test + public void testIsKeyPresentWithFallback_DeprecatedKeyPresent() { + when(attributesMock.get(MESSAGING_OPERATION)).thenReturn("publish"); + assertThat( + AwsSpanProcessingUtil.isKeyPresentWithFallback( + spanDataMock, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION)) + .isTrue(); + } + + @Test + public void testIsKeyPresentWithFallback_BothKeysAbsent() { + assertThat( + AwsSpanProcessingUtil.isKeyPresentWithFallback( + spanDataMock, 
MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION)) + .isFalse(); + } + + @Test + public void testGetKeyValueWithFallback_NewKeyPresent() { + when(attributesMock.get(MESSAGING_OPERATION_TYPE)).thenReturn("send"); + when(attributesMock.get(MESSAGING_OPERATION)).thenReturn("publish"); + assertThat( + AwsSpanProcessingUtil.getKeyValueWithFallback( + spanDataMock, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION)) + .isEqualTo("send"); + } + + @Test + public void testGetKeyValueWithFallback_DeprecatedKeyPresent() { + when(attributesMock.get(MESSAGING_OPERATION)).thenReturn("publish"); + assertThat( + AwsSpanProcessingUtil.getKeyValueWithFallback( + spanDataMock, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION)) + .isEqualTo("publish"); + } + + @Test + public void testGetKeyValueWithFallback_BothKeysAbsent() { + assertThat( + AwsSpanProcessingUtil.getKeyValueWithFallback( + spanDataMock, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION)) + .isNull(); + } } From 71c3a88132536887717ae698374aa095d43c427c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Aug 2025 11:45:02 -0700 Subject: [PATCH 25/83] Bump org.testcontainers:mysql from 1.19.8 to 1.20.3 (#938) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [org.testcontainers:mysql](https://github.com/testcontainers/testcontainers-java) from 1.19.8 to 1.20.3.
Release notes

Sourced from org.testcontainers:mysql's releases.

1.20.3

What's Changed

🚀 Features & Enhancements

🐛 Bug Fixes

📖 Documentation

🧹 Housekeeping

1.20.2

What's Changed

🚀 Features & Enhancements

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=org.testcontainers:mysql&package-manager=gradle&previous-version=1.19.8&new-version=1.20.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) You can trigger a rebase of this PR by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) Dependabot will merge this PR once CI passes on it, as requested by @thpierce. [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
> **Note** > Automatic rebases have been disabled on this pull request as it has been open for over 30 days. Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Thomas Pierce --- appsignals-tests/contract-tests/build.gradle.kts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/appsignals-tests/contract-tests/build.gradle.kts b/appsignals-tests/contract-tests/build.gradle.kts index 046c88d3f0..0a90f08fb3 100644 --- a/appsignals-tests/contract-tests/build.gradle.kts +++ b/appsignals-tests/contract-tests/build.gradle.kts @@ -58,7 +58,7 @@ dependencies { implementation(project(":appsignals-tests:images:grpc:grpc-base")) testImplementation("org.testcontainers:kafka:1.21.3") testImplementation("org.testcontainers:postgresql:1.21.3") - testImplementation("org.testcontainers:mysql:1.19.8") + testImplementation("org.testcontainers:mysql:1.21.3") testImplementation("com.mysql:mysql-connector-j:8.4.0") } From 39e6c3080e4404164ed3d14f08f35c5e0feb6961 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Aug 2025 19:23:30 +0000 Subject: [PATCH 26/83] Bump docker/library/rust from 1.86 to 1.89 (#1135) --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 1390f9d2c5..89b45f11e9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,7 +13,7 @@ # permissions and limitations under the License. # Stage 1: Build the cp-utility binary -FROM public.ecr.aws/docker/library/rust:1.86 as builder +FROM public.ecr.aws/docker/library/rust:1.89 as builder WORKDIR /usr/src/cp-utility COPY ./tools/cp-utility . 
From 91346792f04b11a355e1f1b79993597fc0186611 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Aug 2025 23:15:30 +0000 Subject: [PATCH 27/83] Bump burrunan/gradle-cache-action from 2 to 3 (#1153) --- .github/workflows/soak-testing.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/soak-testing.yml b/.github/workflows/soak-testing.yml index f7c1ec8175..3fd1173c85 100644 --- a/.github/workflows/soak-testing.yml +++ b/.github/workflows/soak-testing.yml @@ -110,7 +110,7 @@ jobs: aws ecr-public get-login-password | docker login --username AWS --password-stdin public.ecr.aws - name: Build Sample App locally directly to the Docker daemon - uses: burrunan/gradle-cache-action@v2 + uses: burrunan/gradle-cache-action@v3 with: arguments: jibDockerBuild env: From e77a26dd03be8e49e8014263761292fbdc138e98 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Aug 2025 23:56:39 +0000 Subject: [PATCH 28/83] Bump uuid from 1.16.0 to 1.18.0 in /tools/cp-utility (#1154) --- tools/cp-utility/Cargo.lock | 92 ++++++++++++++++++++++++++++++++++++- tools/cp-utility/Cargo.toml | 2 +- 2 files changed, 91 insertions(+), 3 deletions(-) diff --git a/tools/cp-utility/Cargo.lock b/tools/cp-utility/Cargo.lock index 4934060ffe..711702b632 100644 --- a/tools/cp-utility/Cargo.lock +++ b/tools/cp-utility/Cargo.lock @@ -8,6 +8,12 @@ version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" +[[package]] +name = "bumpalo" +version = "3.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" + [[package]] name = "cfg-if" version = "1.0.0" @@ -50,6 +56,16 @@ dependencies = [ "wasi", ] +[[package]] +name = "js-sys" +version = "0.3.77" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + [[package]] name = "libc" version = "0.2.171" @@ -62,6 +78,12 @@ version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" +[[package]] +name = "log" +version = "0.4.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" + [[package]] name = "once_cell" version = "1.21.3" @@ -141,6 +163,12 @@ dependencies = [ "windows-sys", ] +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + [[package]] name = "syn" version = "2.0.100" @@ -173,12 +201,14 @@ checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" [[package]] name = "uuid" -version = "1.16.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "458f7a779bf54acc9f347480ac654f68407d3aab21269a6e3c9f922acd9e2da9" +checksum = "f33196643e165781c20a5ead5582283a7dacbb87855d867fbc2df3f81eddc1be" dependencies = [ "getrandom", + "js-sys", "rand", + "wasm-bindgen", ] [[package]] @@ -190,6 +220,64 @@ dependencies = [ "wit-bindgen-rt", ] +[[package]] +name = "wasm-bindgen" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" 
+dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] + [[package]] name = "windows-sys" version = "0.52.0" diff --git a/tools/cp-utility/Cargo.toml b/tools/cp-utility/Cargo.toml index 976b15dd44..7ff1c7586b 100644 --- a/tools/cp-utility/Cargo.toml +++ b/tools/cp-utility/Cargo.toml @@ -11,7 +11,7 @@ edition = "2021" [dev-dependencies] # dependencies only used during tests tempfile = "3.20.0" -uuid = { version = "1.16.0", features = ["v4", "fast-rng"] } +uuid = { version = "1.18.0", features = ["v4", "fast-rng"] } [profile.release] # Levers to optimize the binary for size From 24c2618865a95d90dca65f4952b13f6c27f65c72 Mon Sep 17 00:00:00 2001 From: "Luke (GuangHui) Zhang" Date: Wed, 20 Aug 2025 07:23:04 -0700 Subject: [PATCH 29/83] fix(deps): upgrade Gradle to v8.14.3 to align with upstream (#1158) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ADOT Java was using an outdated Gradle release, particularly the Lambda build (v8.1.1). This PR upgrades both builds to v8.14.3, matching the version used by the upstream Java Agent. 
./gradlew wrapper --gradle-version 8.14.3 Tests performed: - Local build: ./gradlew build ✅ - Unit tests: ./gradlew test ✅ - Smoke/contract tests: ./gradlew appsignals-tests:contract-tests:contractTests ✅ By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- gradle/wrapper/gradle-wrapper.jar | Bin 43453 -> 43583 bytes gradle/wrapper/gradle-wrapper.properties | 2 +- gradlew | 5 ++++- gradlew.bat | 2 ++ .../gradle/wrapper/gradle-wrapper.jar | Bin 61574 -> 62076 bytes .../gradle/wrapper/gradle-wrapper.properties | 2 +- lambda-layer/gradlew | 8 ++++---- lambda-layer/gradlew.bat | 1 - 8 files changed, 12 insertions(+), 8 deletions(-) diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index e6441136f3d4ba8a0da8d277868979cfbc8ad796..a4b76b9530d66f5e68d973ea569d8e19de379189 100644 GIT binary patch delta 12612 zcmY+pRa6|n(lttO3GVLh?(Xh3xVuAe26uONcL=V5;I6?T_zdn2`Oi5I_gl9gx~lft zRjVKRp?B~8Wyrx5$mS3|py!Njy{0Wt4i%@s8v88pK z6fPNA45)|*9+*w5kcg$o)}2g}%JfXe6l9ig4T8ia3Hlw#3f^fAKW63%<~GZJd-0YA z9YjleCs~#Y?V+`#nr+49hhsr$K$k!lg}AZDw@>2j=f7t~5IW6#K|lAX7|^N}lJ)I!km`nrwx> z))1Es16__aXGVzQM0EC8xH+O!nqTFBg9Ci{NwRK*CP<6s`Gq(~#lqb(zOlh6ZDBK* zr$|NDj^s6VanrKa+QC;5>twePaexqRI%RO~OY075y?NN90I|f^(P# zF=b>fZ73b5JzD`#GC3lTQ_B3lMeBWgQUGYnFw*HQC}^z{$6G4j(n4y-pRxPT(d2Wgb%vCH(?+t&Pj z)QM`zc`U`+<~D+9E{4Uj2kc#*6eZMU$4Oj6QMfA^K!rbl`iBix=2sPrs7j@aqIrE zTaZJ2M09>rp$mgyUZ!r2$UK{+DGqgl`n;*qFF~M(r#eh`T{MO?2&j?xgr8FU$u3-` zhRDc_I23LL4)K&xg$^&l-W=!Jp-P(_Ie07q>Je;QLxi8LaEc%;WIacJD_T69egF?7 z;I_Sg_!+qrur8$Hq4grigaiVF>U7uWJ@Hkd&%kmFnQN-P^fq0gB1|uRt!U#X;DnlV zo?yHWTw7g5B;#xxY`adhi4yZn@f(7-Xa(J6S=#d@&rlFw!qfvholE>MEb|VWn^g}G zMSrK&zQ^vDId&ojL!{%{o7?s{7;{+u%L{|tar(gp?Uxq3p?xAysB>0E$eG#$tvkk9 z2Q2gEP17{U6@UD*v({5MP-CTZfvWMItVjb4c;i~WLq&{?Q1(koX&vt7+$z}10{^Id z{KDjGi0JpD7@;~odF__0m|p;5rIrHidOP9^mwKe#-&JX-X@acc)06G{LO1Wu)#gvZ za~y9(fhA%UwkDOVU1LBJ`0ROE z4&)dJKK%mG@+CIm?+wt9f~@xIMr8}UH*K1j| z0pppo{7gv3v{URwxVMeg>Ps!L5IKxm 
zjac2egjgb0vH5i75$s|sY_RYec#>faqJk|AGgV;v=^%BM(^p{p;(^SVt-88G9f!q; z>p}9E4^f0=01S2pQBE4}9YqE%TV)*hlU^8k9{&=K76+*Ax^r=AkBb%OCP^P2nm0Ri z;D-|Zk?gGeU<12ti2CnPVNA(Pb)02+r|&yTWW-OJO7 zNLb0pps6aN?A~NJp5kj{{IOlf!5KWMleV@-hYLift)D>-7K+tgs=7Ake}oBnIy-y1 z(Hn@Hjw=_(x>dO5ysQsrnE%A*bk0K<-j{1Yqz@#n#jOL^AzCr#wR|WYzqk6i7v)Lf zkXdKxzuu20aP{Tbg$(+9&oh7cd(Uoqqf<#ujb$q4sZ~gxFbQfS zS)kNklyL*{2AELgjZ(LBu*>S(oH5AaJ;YiB@;l@=O%F6B?oanzoYRM^fQ9-<~^=3$H0g^JPMLQo@SZ@QuNvy)tyJ)LSj`+()#fy?{aV4Yg^7dlQ7AQM^3GLCR2dAFR zJjtfKiVqF`l-H_fz0HD|9g>)pOxn}k!vdZ=DO!7Sikm{Z%P6BrRkBS6W?ZB5W&7rT z@uYpf@M@a!z7H&o@-yrcCL^Ff3e7p3T`R9p?@o-acXmbTSa0>ZANzCSgovsd%;i$| zVus`not!oL#(W`L-!9w0jdaECaG4hk{V7IOs676ZquZH~0TX5hDq|)x z6T497l|E?f4)LA>j=S8}b$0LS=I4h|hUFJYJODT8Li@#6kF$k0)@*l{RnM1HQ%?VT ze-Pqlc!~t(oumVC*?5fwR;P6u{tHaZ~*LlD;B)4f? z?lpWfa2P@)g57flVl83Ej%P`2)gGyaPjhvD(%i~{`2b>#3!+y&` z!2nuwHMFA-zUY}f1^0B8<`N)Gr=A4TS@b1qykmd0Pq{?r)+1^^+D(=xasb^Tf!oK9 zBLL+*p6M_#ufgLzgq1zcSwZsZnQWFLC3`Yxdg-2=*tT`J9nrfYt)RF)YryBf8_gW{ zvKbB+oZLehfT)S#<|y1)E0hW^?+AnqPXq9Hu;v3dsMGdr{SVyF63;K<8VcgI#~}1i zLYSBL0K;RTT(;>2x=*!1Di9w0mwr;`CN}kM65|Ay{~z}_^JKOsRaN<~#9O^iiW<5P zYN7r~HV!#Nz~IZU`P>1Xe%4f~K}KcF#X&5kO*G}-)74S*tQ8CietdPcA1Yl;S=Mr# z`#MYY!{s^uo=jn7;k6O%(}fN+*0cWMpt~#n9DR<3NyU?+3D^AgI}S)Cu-Tljg`VY} zX1=fq$?8$DtOeGxE6f8lbS_6Q3C4+LDTO$}_IpM$Xv<|QSC%+Oll^q$y`7o@jD{dp zNDl|&X)r7wETa-#h*d`KXntxI(Y{vLha{$0i7@G8xx^m=c<{lJ9?p-i!^W{%j7-oo z0W^SzZ^(Wkyz*We{lEn%Yhu-ycUOHtrRiVJL4~&S91*D0MrLu}Q>v-Mc?GcWfpyz% zX|UvcN@krFO#@v|CtYM}g|=L3%aMo$E5<@CM%c*;?u>LOTz00@+dt1{yg1y=$h+{|D17U}$*^fE^H&8b431EUE z<9tv0V_#%#&1N#j7AKCj!tTK@J%oFW*ESW<(#Gl#Xs%v<@AitI?s92nLzm<)w3Wkkom1f$gcdUi%g_*jofy&}N#luL<$GVIe{iQkQ)sIHVy zBgItnPBFamrv6Kb{eE($Q(f`ZPeW!Hm%Y@F*OF1sKB{Yy|C>WEv_mfvv-N-jh)B-5 z4a!1WcT@9a+hGaBrc~sz=>G?Q!*Zp^JFRUvBMyNR1;`)j$RhH$6gEyVKhd$&K-CFT zXaWC-Y=fyOnqT84iMn9o5oLEOI(_3fk!W^8-74|q1QhQ|CmT0i=b;6Z3u?E{p7V{? 
z;f#Q-33!L+4&QQcZ~GAqu$NS{M;u%`+#9=7^Oa5PKvCCCWNG_~l(CidS!+xr-*gg{ z$UQ`_1tLT_9jB=Hckkwu>G{s0b0F4bnR7GibmHo?>TR&<3?D;5Fb#gd8*wYa$$~ar z7epl1qM)L{kwiNjQk}?)CFpNTd?0wAOUZ|gC{Ub|c-7h~+Rm(JbdoRe!RNVBQi!M8 z+~U6E2X&KSA*T6KJvsqwqZl#1&==Dm(#b^&VAKQ>7ygv*Fyr;)q9*^F@dCTg2g!w~ z%hg)UXAUyIpIbLXJv1nZX+a_C)BOH2hUim|>=JHCRf(!dtTidb&*~I!JrfRe+PO>w z@ox$G2a3i9d_N9J=|2$y2m-P&#PTNwe!oLBZFs;z|F5kXvBDn<)WwE0E3$ow=zg3R zK(9;sf0t;VEV3@gAg7jRtnj%-6O@!Hvg*;XcUAw}!=2*aErvB(eQIm(-UGmq^J=XN zTqJo$Y|WKo^HlBF3BXJrA#}7ZLg=r*w`I*~Ix`o&2k8^(0mt8Rp=A>F`&gehhp@Jy z^e^#B2!~$LvNCKugg)8)-G%&THdk~kfextilegP9?#C#()F59U$&eo(h|5>ceo*Em z{PEE79T$YP|Kr7K`WBHbtQwyxFkCl6xX&+oUf90B5xoi3_5KHHCyEE*oPbOQkfMz& z6^hT8_NXd2iWk{q9IKae1{_7hMPH8I7_BMtVOM4 z6jm?E0QJOn$qrgsJ`9w##GB9?G})-GXSQo6(tYS(Q0-Ct$co?Zzl0?NHsDRron?;_ zZZgQg)%XW>P?8_&zoGuF(>Och2kEJXsu1_X&~w87x!b z>~h!a>e7{`p@+#hXF88wI*JeWRZ;J4ev4<}HWf|Z;(7$E!S5l9wzBHFe>^I{2`a;a)QnAwa2xv1e(bq$<}!8o^ofGvYpk7dBR+`*%iE;hUY5 zaHF}OjGO9r*{%lmcK^uFiTHgoUD`^9Nx@~;Bg!V* zuuJ&ti{DQiq7RyJAR94wem{}cPK1J(Yxnn_{=>?USqz-~&QXRStS^s-7TksZ$AEI! z#og36s3JGtGU{CnDHRFtipFqvrE*gw7_K@NN0h+ItTq@4fqN!HeQU1y7*X?9+IfZT4Vxebpt z%#VzgdDK~-&+=Z*#>=n#XUhNvBZp3=Cr41jMqwJkHLf3L7Vm~V#GgJ(Jpii~PmJ#s zA7Ft!{xD@z>9DUb4JbiUBdNEcU4BO$651iN*mp*f)HbRRM`Cx5cR?5IfEcU{IZWwf zz(M6CDv)>xa3x}K6%tP^i15P1&&DOLK=k~+jNR$UK3frSl+|PjSC-dBItvD~LL! z>_g(YYdO4k(5EbPOw+v+;G7~jYm>F@Ai|o`gs%F)F8tDz$dl7Q%aCe|v|$UkAul_R zNlA-beBX^IJU?kgS`E$it7nF4DaI!SJAGq)2P&Few(-|tp z?K+%D3e4{pfkayrcbm0ftu6Ol2ZzdKM+4i!hNP3NRL`EvvZJ3yvNr2MV%igZ4kj``Qrdb_OI$7jWP z;l0DYf&0(-*QcP5zrP`HVznW+SbH63Qx$7_9~NjRNg7eKqI!UJ=XH`g^=t8GiFTu( z?2L{JKEu%jJx&XjNzU(*!ZNmL1@RlJA0G$2_LrAb_7lmjil(GSlSM zwTes`m+3R;3#N~Xg#9owh3ycXV8@ZlaY_16kpPFA={721b~URO4HD3sp%fmkZM}k) zZB0#)kP=RkNB~R-MCk8aljG_bagt4vIb~8)BV%(b8_;)&Kf9GX+%O_cNG|(D$!3&D zL(I8}*LqN5NntipFlN13=`D>6!{D@CFMBH0kW3=HccJV+xW~|$qeFR5i-2{X+iWMu zI2$gepQ)H_B%ip_BlWOQ*|pErXs|4ir{IHccgaIJ84irE{?+$KDABXr&f`jB^V-c% z$$u`uU1YB^{<+UN2cNg#7&0bz@yF?5>j|;)5&IV3wIQp58X#OE-M^$HdyvL|Um5t? 
zhZlAG!Mz%XkUe3t471JM*Yur}o30vzu6RN7gJyNcf!IItsDO730mcJ*O!~V``y5=3 zNJGp34DZ}wd1H6V`Uuy%es>BiO_aE-S8jzir#$& zyk)@2a5tP$@g%jW^b^JGdo)X@Q%sE`^lDQmY9m%uDFpPX`w9%=yQ+nneMm#OaXcD` z9}{tn5A2b2z9783vL2_jSao?uxJhWJoq%47*RafM4o0@gY(p)F>qT4^XM5GLzV#6j zC+HoGhAne7o_w{WUo(B++z7lU3Y0k1rYv9|TSv0vR-Du(5=VakbbelgZTeDn+a_Wv zq_j-^+Qz1WAl;Zg>ahX|CERbX1V%B!hTKN?M}fGoA07M(WU&NfT&TmN`P@56U2 z^)vLDs|Ln~0iTtn-?KTeQl@T&bskJFuTUS!m+$CS9vnd}8(UMO|Kv6TCfGN9NUu&4 zL{)GTxPq>fwsJ~aU=4Qhuq8*RzDsP(LZh$BHezq&9gK$IS<|DYbm})$QTGCS6T;Dr zEkLct!b+#<1r9OKG@P!f1wm8>=Nz!7OzJm!g<+`?N3;YaA3(P@EL=(sTaRMDD!c8=-XN^4BXp(eVkj$NmEMYPP>YJ4bJ3yUud z<3BeJAJ$6z^TuywnfH5lv#$lgwraNw{IV=tIznPH1DT`v-5yS=!)J<}xxl}uZf9azA2A97Haf!;<3y01hlw?dWNEv@TLi1s-mO4vmIT%O_42nS z$VRWrs9NngqRRkWAnWkn%`Rw@?wH|)7XL`EL5EZu$qyJW31&CB^T_)qwIv!{;E_6 zo-9XAryQRlk-O0>o#-SZO>|6OYq;}<*>Wu1AsVRiXY4f8qb;+sItv3AyS!4Ry+q}) zA!pAB|BmC;=RIOk^^vlsEH(!Q!7_1FK~ZB2err*o!+b(r=m1b?$6d!%zmN+69LXnT z&gRmM+n_R-F@sT*IYv0_mGPvur!u`iWbQO7SqiGFLeY&yga zf`lM&B74FA2C?N@8_z652fjhBEoDUKbP8hL{0{HAF%qDo7)o3=3rg#6)T7%%5^wl% z9R0*S*<~>nzYOdQk2l`9h#t+gJy_xujw6xjV(8S<_DbVg61&pT%Hi42l%D73G?adn znB%UdNM0p}lEF-P2%TAMam2zpQev71e>a$$%i+r~b+D9G9pF|oY_*(-u*89oKsXLY+UIbqq)MQ%(GYS{(*n_S_*RN$*~`zUtab%0aKwhx znc)Yo?{xq1sJCgQD)TeTci1ucvbez9q=A72H(-SB18Kl&6^vHV8^i!p@>iF!DIw17 z+8Q)TNisB7>pwyww4y)yJx*wX6SJO78eLBC-ar1+k$Z9fy;wBD|3kzI{<+l*>PSY^ z_?nLOZaeWbU@C3hfK?X;Di*8CHCPkx2qco6(ZyJdqSzp^TJ_5Lpa0UP{Gy+!b0Lr% z@xYxSjUKoY6L#>$qx~KD$-0=|OF7zhVP~ntMgEALYPIfhj@+ z!;JJ7te>CcovruwHsJH6Lta$nm|%^C@=V-rmhU{+I~0(|XHQ9jt@L7pb{gx#{4r!) 
zg($FyFTslcgu(~6lYr$nW?)%*l#VJ=R-jxK(x=t1bWlu(nL66T#qj%3aZ@uVhy}Co zDU_q61DD5FqqJ*#c|(M5tV)XBN?Ac^12*q)VN4yKPJ|#==S_`_QD9|0ls!`2)SwuHDRA_OfXQDq3%qW&MZB}Z!=k-9xqev8jHz(H z{^D@cIB~QiK>~wa)A&^Ll^Wi6QgCzU;iv-BHsLBs zH7=jN%|>0S`SjP%M&AF1PNVDp_FZ?2Bm@7`DC&v(pYrw!!yD#4 z6+<=HS0Ln6MhoKxF<%~H`y20{vf#pxh=;j{zY381gvAFekgG|>G1zo8$&az{V=;JR zy_puF4$L$?EMhT?;TpQoR*j16ll`#AS4e96C}yp_aGKkBe?1H|k_;gG-~Xorc<;lI zkB}fB{$c-D2mGA&{rm<*@F5)c3X+6??g~XoEwuzSuch0D@W~P5(2I8v8F$c2$Vw51 zP#YLSBDqtWW^EYBl^QYHF+MA7am6f4DOhwnJM=W9$uvMOsZ%_~?)2C#wb?CkI$7{K zEi)=#|5pFvg^){zK5kpBLjB2kZ+$ZB|L=W|aNwyyb(gC2l7bcpx{E-H@)q6@D6N^xh`{1E%ItF2$eeB_SjI@b2WgTpS1thwg&n`jiIzw^TtXUyB{00($GIq>vbj|}bav}}Q_~wp3>k8!E@hVC;OMUTu|= zAy#vXH*GrUHu7^cNZWe1>y;2(51js9wbu+R3Aa*(wzH9+X0dIsf&gc_x|_LP z>~CF^?(~U}+l~ehe|i>?4eo!xkq&Lk+RR-1duNP#o~>@1x)s&i&u zRaYL@+D&_M|JLI6fHbEr_`U;HgPTh#E3?sB)A$*gqyBgg*ql|a-m*TX5rACbWKCE6 zdeQ`v8m6>g^ugv`p|HY^#1QZrGGUj0^HVDc@{?Q0yhalbBEV{+|HzC^-{&e{5K%z9 z6Bxtnfu1!@Mp+Q&*&~;FOg&*Vm<@4b;{FG0-!UUXX!|)1w}op!B_|7_s~d(+=9Gba zKp8`LaB4D(H=cGcspJ_TjYaOwMb=sGn^gtUVhK!UI~2KKYEE-NC}F>+BEY7IVvy%KRvm00tg!Q`y=er}wpEetX}K@;}(}{s9AzV#q2@ zBy7}->|N?13POrs`;U?(qAG(I$~Gt+Rgw%aNZ_0fs_utVvRJT-7z4!@x36v@=NBX=IqkK{#Kg0w48de@?#Yb4M(Svj5=T+<ONr8-oh7l?Cji@+erqur zFhZ=9|Lk=$`c}v4u`)-!!UI=!9Jo@h&7p4RlS#u! zZ7-prn75JkV?VjptX;@$#`U`{vB!=Z?V`T*FBF>J?vsML7e6@2GbUteMFfX-TUu{2 zLNIG*;dV)8GV8gAgEf#)X3A>p3^CRka1v?~8x^anBhQ=L=LsOl=&pcOYHo98m##ye z34MtGCDK!`ptl?taGMr5q{!zVc? 
zG00e){TV?`YA9eB;(lA3lXI?RrB4BYQGk?vOmTIUJED=(`_*gtn2DB-t4WW54as*W zb2kD-lWX>lb$+W!VFakki>B^Vc+u$?NLF>)!U%b@Y}gYJ>m2H=^x0=nsE0TF^Yu0h ztgH8-o1%+jCk(+&`|)tTfEVHq0cMeFa{Uz)X$;fCq%Y=SOWML6bYfeP8j5hktL`KK z(18`XrUn&WN9PtFxh&dX`y~YBsmdhi7Kw%tKzM%^VEhdD<_XkulW-x=JN6OPbFI4@ zzDDRN+f=@{0h*MswwOqG6gJ?{NuHx(y-|FUGsxyZ*x0~$MW(eY>vqq4Fh#t7uzw=- zKB?|!0N~!h^AMdLa)oR!Ca#HZ9&Zf)ghuO<^RN)4twRlygHnQG(BE{cDc5E}OF4;xss6gYyV~EcJvJkX)xNWb=@yw!uq0v-sf^rvkp-;?DPWK@*SEw|V;IH=7 zfQqEV_>DjOPT~8X*J|H8=&RnzK4~S7ML~nLX^%s-Vqc^aWy7N$y57qciZGcqy#=zU zs8hcHiI=D$+RB{|62{ohCTiaML6FI4Uhzo5D{Jik@poCs0w7F)*w}F4r0sJ~#u-72 z5bK=ANt=M$Dh5NKnxGsg9NRR?WD-x|FhTwBjd zD<-K>44DB~i%frJOfnzh1R>PRY34kw!6~p3M$JLaD1r@`=h)~Ngks-(gdXh^Q?BTP zZ^Zj5w1AwtuR2$~E7s9iZdF}z%pv1em^V2rM{1tLUY@-+Sc0(9jA|iZWml1;v13=U zHf?y@#mb--7z6$ue>`qjhE~brk$AY-RG90~5wcBbDReXR2)pKg{L>;H(DI`U!MLNQ zY9rFJP@ZQ}jlcMh%WSCo%vf+nd0Gmd*F%KMIe>slCUh)8Ma|;M_I+v#;|ueg9oLg; zq2HtZX%&#F7vdpNlkX?}(C7dGC^y#NB#m4%69RzTNrk%4ol~hSI%>2r6B|*ZkW(*P z;u#s;+faHo{tfy+1L^RzWDi*^JR0iY(zJDB36y_QJ+|E-2x+cY z!V8uLNktH~q>WQZuY!Ap66WP|E!0PA1jK~)^8oJVGbspJs6QL!!-5Qm7 zHYI|_`Actg?vDzdg5{86w@GS$G6ANzff7->6i5pB$T4O}`fZ_;{217Om0gN5zTr12 z5mW{hCzCE-QubjxN$TAE-XgI-8dTY@OZmq`y+y_>dk*(qXF0{nam|q@~i}Utp*k{yurq(DW54hkDT4bbg z=_etM?Nf5W^o-HEu9_?&xEqPg^P^mTxLH8n%u$!mWvFG|{&)jtnU&6|5-`~eaNz0%D1BDo`{ zS1N5(KW5v^2eLdd_%`uaRndF@h0Uo6=M|8?b~KbOLZk{HXEnGmtgZXf2inI*1r%n! 
zQ3&%RI4r{f&dwW~HwH0Ked9b!k6{>_19H z_Ai>5IChDMY(FfMyG%;30?SQ{iV9KyGru62+Y)~qSQ91}b~}w<&*}R&1c#$O`H@~c z5)2S_eXx}M#N{MuGeQS9@#UJB@;W_j50b}jIhxMPloEFQZdvwxiU^RYycTzgK)-vl3LT&$L8~@68$C8~5_U{cR$E#w*x65(qw&eoL@>%ZHvj zWnEMlSh*(o&oy|J7eJ5OD`ssy%F?*Vp?`Cq;FShyl{ZoKCG5g{y}>usznni#8ki(i zO{w@n{iAj1_ooX@+s*!uW60WcH~*bNOT6z%0jVML5};wVrQp~`Uss_{cO2oud_nNA8^B$?07fJ6?iI)Q zuo9G)O-z)DqstrBqf>B%S05hf-wep0@$BFHKSrkZ{za3D)yVzRz)2{wf8(Wp+xyAM z$rtyx$gi3A=V~V!`Q3;BM0$>*VVtxEM|xDL^gew7ydy3Q6YzD&THRz*q33Ms_D;M- zbCx1Ft#UNB)V3bf`~{ImI72OTp^|bF8?G8#FRj+Biy8ET5#rA3sd|0FR@U(LAJ%w8 zS1%n8Z=Amhw)92rIsof=YVWF4jw&F*j1LG@-`+cR0-~2LqXRH8(Ccne{y#MCPncF64U`0uO zWmi$dlii~1D0rLR{qc|_2M!C$t8^=G7xQY)9!#Y331A|>N)EhmyVdLWL9I3YLJ`7? zZmpqUJB>Ni9oiL)^1IK1UoMyhWE{$9M2M6Xi zPKk7GpMsA6vjZbU7~i+u|J6Nk|Ci!Y3UMUT2|`M;JsNQACdJ%ooo9Yt{?A+0hMpxi znEa~~sxC>rKrU6bd=WRb;%wsH>A#j4{({&1GYSNR57Gama(3)2A;SM>qop}l>Jk2* zn1+C$fIxuwzg3mCU#SOqb-wOCb6mBcYlA5+mt<&_J~sBxc(GQtBFINUO~Mr7<-uu($>P HJ4oML2Lo<@i8BwbL^1~GkG`E7C$SEa_ zF^}Ea+#Je`Xy6;#D0FPnSrR%Y!QGA~NA^{oWmW8C<3dr{x6wWQ{4+bzemqV5W$i5~ z=J0jXZ>uZb>DT@0Ks?4QJ{`z?8JWl3$y;2pj#$XP*pv$>$g(z43{YH9KmmR6<#sIn zA`#=0#sgycaBQ^&}Xba!|KaZ8~b30v~nLt z9%#gz_*=~KD{3t^X~l>480*}PhKN=??g`RV|4Ud{Gyyl187MJ}r(#e+H$GEdI+p1s zq_25h;fV)$EPK%Dw-(G=f`yHB-_tttsC!?k7*#!|4a>`Ahj8nm?&n>NRs%jkZW^3-0P_yMP5&*6a26{MRj1&TPF zyE#|c)5uUHzMWx=rMKpuPih*V=S;W3MzIZTw2uTbr}8`p2bm+Z6Sa%vvWAWSf4H)p(+ zSQ8;EvUa#wqWV+9vmIio(%7wukK2SwjUS8Yl%Rq%=~PU)2$Tvm6`1!r3H@U#_|bB0 zmlT1PS3wPB(b&^+@YY7Y$n4l3mV3-X0$>z|gZp6O*Lhzn&?Gad2ZCF;+#95-Y?#y+ z?*l@Yf=a4w{Px=o!N|3~_XKfk&G;fN>Ps&dp2FpA~qD=0~=!NOS@B#XAKKkND>Y{4>rqxrViKD7;?>j8`R` z&G)3FN|dfsxnaI^!d1G%=>AbTTxZWo;n-DLrQ!sj=f~VAOe5zhGS(dgx|!ls62fbX zV@<7Ck^!}R=`Swr?(7w1rY6Nmq~sfXJ?TiKJLn=&SQdEt9$@0 zA+h1Wbwbri0s-stc8yVq;mRa6@kEf8^KXUz&jcic!+avDvvJFa>k0ioWug=T3oPw; zyj4it&0@>_*uI@2=^+T7sL1_!^aJW@Xfo8aC#3^WtQC7fET8b9C} z*u^ue6Ojn z7@(eskJ2+cNnH9~VyfIh<-|7!je~vGy*odz(sk-u$~SrYF3glruZ*W`{sqnS+9=;Z zh{D@MSG91%lr&ua8%$sJF%y1I<|e;EdfJykY8#D$Hc_81n5`$7;1N|b0tvvPLzSg& 
zn7!5x?T*@rQUKcUhTIjV(rw*5oQYlm5DbEO?60#mohHfbR$3_x#+PZoYi@Vd4`#YgKyTd^!4n{fN~WZDY61sAOm6 zl!d^i*a01QxpWM9Pcl?&{RgO}uq%ErOk5WpECvnfEh!*YP&1Sl)uTN4hg??Vqs~i5 zYsfufz3?{TtwuBN=`0~Qg1PlWH#OGG$ zLLWU17$v``)CE1cds_7kj8mJ{-+l8{DS|zAQ&3|qpOY=!J|kXUhXue9|H>4gqk|n) z-i34GmxLFj8asb3D#D&=ya*a5`C<=o?G;Ev^LV%;l#nH#O=7Nh@z1Do>j6Q;I5S2P zhg|AZbC&|c7}uSJt57s2IK#rSWuararn-02dkptTjo*R{c5o(bWV}_k3BBnKcE|6l zrHl&ezUyw^DmaMdDFVn<8ZY=7_{u{uW&*F<7Al6};lD(u;SB=RpIwI)PTyL=e25h* zGi{lRT}snjbMK~IUx|EGonH+w;iC2Ws)x>=5_{5$m?K z5(*1jMn%u0V1Y%m@`YS3kskt~`1p(rA4uk;Cs!w^KL$w>MH)+cP6|XKr4FfHIATJH z!EGAK4N>1yFR`-zW|w%ByRe#=&kA&#WyUldDGpt!wf-8SFWiSi!5QZL+l7*CE?u!NW1T$<1rdLJ9y3u{_zvHaM?#Rm4 zFk}^1!ffcrB|XK3gsO-s=wr*sUe&^$yN|KxrA)uW00Gu60%pw_+DcUjW`oW<35OC8 zq2{j8SgC}W$?10pvFU83(SL$%C?Kctu3*cs0aa%q!fjn1%xD*Jrm!F3HGR9-C{b?- zHp(cL;ezXMpL@0-1v0DMWddSDNZ5h?q50cOZyVi#bU3&PWE=(hpVn|M4_KYG5h9LffKNRsfhr^=SYiKg?#r&HNMi2@cd4aYL9lw(5_IvQJ zcB*DD()hUSAD^PdA0y|QrVnqwgI@pUXZXjHq3lG2OU&7sPOxxU$Y3&ytj6Qb=2#cC z;{d-{k|xI*bu+Vy&N+}{i(+1me!M;nshY_*&ZQLTGG*xNw#{RpI`3^eGfHck+*38NRgiGahkFethtVY=czJs#)VVc{T65rhU#3Vf?X)8f0)X{w!J3J{z|Sq|%?)nA+zo?$>L9@o`Kc|*7sJo4UjIqu0Ir~S5k^vEH};6K?-dZ0h*m%-1L zf!VC%YbM1~sZOG5zu&Sh>R;(md*_)kGHP)<;OA44W?y53PI%{&@MEN}9TOiqu+1a3AGetBr$c)Ao3OX>iGxmA;^^_alwS818r4Pn&uYe^;z6dh z)68T|AN=hjNdGpF7n>y+RTAZc9&opTXf zqWfK_dUv=mW{p_vN>|(cIkd(+Jy}qnK{IW%X*3!l`^H~FbAHwof+vLZ0C2ZXN1$v7 zgN&R9c8IO`fkR{6U%ERq8FN<1DQYbAN0-pH7EfcA{A&nhT!Be>jj>J!bNRw4NF|}! z1c70_#fkk!VQ!q1h2ff@`yDyrI1`np>*e#D4-Z~*!T^8#o*$V~!8bWQaie?P@KGBb z8rXc!YDL!$3ZgZZ%;-%~0Kn<+d+{xJ$stQbtN8GWV?MCJvzPU|(E(1z;rFw{&6vy) z3*@y%7Tx8rH-p$boS>bLyod?OKRE8v`QSBvGfY6f}_{Zo1q85xoyOF16n~yHx2W ziydUoYLkJmzq|n&2S(O!ZmLdP1(o1Jsq88cX)x3V-BK5eF&0e_0G!5?U7&3KN0`mc zH&Lt)q8!d_VgzxyL^(@xrbp2y)Hmr^V48));RSfE=*Ly0uh9!$3dv-vMZr2URf@l5zdwLjGZB zugY>7_fd_vbV*Qv1?H~>Z%RD%nEeFSI$n$$Lrpc6g>i4+XdBB!%zM$Bhrz5Swzyg? 
z$~I~n@~-wTBY3-T&pr+|gC+OHDoR?I(eLWa{Z#Rsh>lc~%u0!&R|s0pA*w<7QZ}{i z*AFr~0F3y~f$MGh_HDL7J_1?SxKL}fWIk!$G}`^{)xh*dZ5kK>xGL9>V`WZZg_ z)^Vm)EQK`yfh5KiR(vb&aHvhich z_5o+{d~0+4BEBqYJXyXBIEb1UgVDs;a!N2$9WA>CbfrWryqT25)S4E4)QXBd*3jN} z?phkAt`1rKW?xoLzEm!*IfkH|P>BtECVr0l8-IGk_`UjE#IWkUGqvyS+dMrCnFl<7RCgSMX^qn|Ld_4iYRldO zY&cHhv)GDo8nKvKwAbfyLR%t?9gG?R7~PSD#4D-;?F&!kV59O}neYut5AGbKwy-(U zqyBi=&Mgj|VIo>$u!DHM`R7O?W8-idbePuxiJMH``6c_5L-chKd}=rGC5Gfrc{f!* zWFEBm?l@_b7kzY7%1RQQbG5V<4=ZlkZ%sF74Q|mKOc7Ak7dP2#quiGcZ0_J%7Q?j{ zv9{WFw;n5G-Mn%r#0R;{jLt{yy}9J6rQ(>X9pJ`7Xy?Zv z=lNit#qXaq?CnElK^zF~sG}U5oCpR0T>FH=ZX}Prju$);?;VOhFH8L3I><9P_A|C+ z{;>~dk%9rrq(snjsEm}oUz2FQ21MCG*e?g)?{!&|eg7PX@I+Q0!hL6C7ZVY|g2E>i zr!Ri2@OfEu$)d52+>+cpgh6Z;cLYCZ&EMR0i<^~4&wEu_bdo;y^6}+U2GIQgW$|Od z_jg{O=pU>0-H$P-EOlWyQy#W0r@@_uT}Lg+!d5NxMii7aT1=|qm6BRaWOf{Pws54v zTu=}LR!V(JzI07>QR;;px0+zq=(s+XH-0~rVbmGp8<)7G+Jf)UYs<$Dd>-K+4}CsD zS}KYLmkbRvjwBO3PB%2@j(vOpm)!JABH_E7X^f#V-bzifSaKtE)|QrczC1$sC<<*Y z$hY*3E10fYk`2W09gM_U<2>+r^+ro$Bqh-O7uSa)cfPE_<#^O) zF+5V;-8LaCLKdIh3UB@idQZL`0Vx8`OE#6*1<;8(zi&E7MWB1S%~HAm%axyIHN2vd zA(pJGm_PraB0Aat3~?obWBs?iSc*NhM!{-l_WNCx4@F7I?)5&oI|z{o@JKd1HZ}zf*#}JjK3$ z-;3V*WJZvUcKvSOBH4c7C{fl8oRw8-vfgKQjNiR|KhQ%k6hWNEke(k8w-Ro| z7Y3)FsY-?7%;VT64vRM)l0%&HI~BXkSAOV#F3Bf#|3QLZM%6C{paqLTb3MU-_)`{R zRdfVQ)uX90VCa3ja$8m;cdtxQ*(tNjIfVb%#TCJWeH?o4RY#LWpyZBJHR| z6G-!4W5O^Z8U}e5GfZ!_M{B``ve{r0Z#CXV0x@~X#Pc;}{{ClY_uw^=wWurj0RKnoFzeY` z;gS!PCLCo*c}-hLc?C&wv&>P1hH75=p#;D3{Q8UZ0ctX!b)_@Ur=WCMEuz>pTs$@s z#7bIutL9Pm2FDb~d+H}uBI#pu6R}T{nzpz9U0XLb9lu@=9bTY&PEyFwhHHtXFX~6C zrcg|qqTk(|MIM%KQ<@j=DOjt|V)+8K26wE_CBNnZTg+Z+s}AU|jp6CFoIptG1{J*# z7Ne~l;ba*=bSwAMQ|Vq#fW~+je4PXA91YFzBubNF?ovIOw-$C-8=Ehed{lGD0}(Id zRe4sh8L>&T%{>8o))he}eE;5_ zxoXk3wX?MyNl-xF!q1d$G?=wp^`@09(jU&X zOqZIBI#dN`2PJNdATR3ivtub|nO$dulSaP|e4)WXF1YAGN1pDQIbIjXFG!oC85Mt; zW$eteoL{y^5t4TMRwP$jNPjZFpGsWnGe=jMMqKtcZm9Y9PFZLi*1p@qoKKub^T@2+ zk$@*KYdQ?Z`}<%4ALwk*Yc{(WTf@#u;as(fvE^9{Gk)lWbJP*SjttWofV0s?AB({~l 
zZI1hZVWFT~W-T?nfMMcnCS4-#6H-MU7H$KxD;yaM46K4Kc@~Q>xzB+QnD_I`b_l3m zo9pRx46b!p?a^&zCDwygqqV3epjs(s0NQI6ARA1n!Yy-qduipxQ& zUAlqRpNjBS+y-ZheD(!R;F}&^V_}b_gqH%tVZ5%%ziO7k^w=es+wZtK^i*vmrWNLMs{oWu_CIov|s1raZiS)>38>pYu;i+-t zI_DiNe6aA4KTZ2P09qPj(0~K4nUq^0+f(2$g`229zkG4jLzRvJUWE0oF1XHL4t3UN zDH466G56sy9hTZoAJB!C3;@F;ONxEk5u6Mv%zdo}Rq`=* zw1n7MOhfNSV48TS989ArIcj`C%Gk8~93~u>)!Yt2b4ZriKj9x2d`H2HQNJ=I>hkDlcZn zqRj>!;oRMTIOu zx|Zfsu~v76T{z7AC(jxj^c@tnJHZtGPsq$DE!8kqvkDx5W?KUJPL+!Ffpwfa+|5z5 zKPCiOPqZZrAG;2%OH0T$W|`C@C*!Z`@Wkop{CTjB&Tk`+{XPnt`ND`Haz;xV`H^RS zyXYtw@WlqTvToi;=mq1<-|IQ(gcOpU%)b#_46|IuWL#4$oYLbqwuk6=Q@xZaJSKVF zZcHs~ZBl;&lF3=+nK; zF`4gSCeZXlwmC_t4I`#PUNQ*)Uv&oGxMALip|sxv^lyVV73tKI7)+QY5=tEMas{vTD-BaTJ^*Y6gq~PU;F5X!sxqiq$iFCo+Uv7m%1w((=e}Vf*=dtds|6 zbX}91!G?C*KG03eHoN}RZS9DJxa&8YwNCT8?JxMXyZqZr13NA|GB{+vG`08C{V(yy zf*Lw$+tYSU_+dI`3n{bMrPdDb`A=Mkg!O=k>1|*3MC8j~- zXL79J4E=U^H=iBLTeHE_OKzE&dws8RNynsSJ!d;`zK?P92U{f)xvD7VQVosrXZrL+ z6lMVdD1YgL;%(1cq{#bS6yXmp|DS@nax#AqqlZhtUQdh<^2vr5`EpAO

LGYq)sa(w9^3-f}NHy=GR4v%t2YZly3m1G@5y`xBh_HGrD%f z>;|Ty?9FiJAc&UVD(StT4I` zfVQwxhE9bXE6r2mKO8Ag7{L^jCyqQb0QqKDPE=RAgqn8q1O^>(z7h5kE(6va%QqRZ zkIOmp(})rLSS(2{=C12e&@!W2=Jel-^_R``0xHO^+t!(oXbcv5yhD4g*$t_F)_5Dl zSVCgesW%;DtYPCFs{G;GX_o?1J3;QQPPv)rWw;>} zJ&KwnUqwNXloNXlK_+pNDfI~hON#SokVJb&ilg8d7^NWo2ZQymCqQMnjfi>ePibjr z-Z@q!?RGN$Mj}Nk){X_vaj6?Mj$>ACR*z|6MsXy3VZ^PFn@yHkPo(>m(iWepn8SC@ z>D2;R4m+gDRZ=SIX!b+CP(qE=JDIUkn=D$aUu+Ihn9-+k1LS3PreQg0N5eWIG@x${nC3v^7caS>1!PKNAY9J z#}E}Q9w#SP>(GY7Hbj&z4$Li6o5taBO|4+F`yS9zq*LJ<38wy4I>HA9(&GYrk4dLajKGww))BWli6Ln1A^Lda@N~p+snkb9C z@OthI+<##vp8!HVQT4Wk(=@zQ{OvZ$EKWS73+JHb)eYLGD-cqi6^|vd$<+IHuc?Nq zW7JertT~3))4?J|28n$I@nAD0c1%9C&IVhEZX~mUsf{efyS(XNG%ch;!N~d7S(Ri7 zb&=BuON95aVA&kLn6&MVU|x}xPMp7xwWxNU1wS+F6#y}1@^wQZB*(&ecT?RnQcI}Y z2*z!^!D?gDUhc@;M^OpLs4mq>C&p{}OWVv<)S9KMars@0JQ{c_ScGsFo3BJ)Irg++ zAWwypJdTO-_{Uh8m(Z!3KL7K{ZZzKHj;{M8I$mV>k znTM?sa0);^=X^cglL`uC+^J)M7nEa$w=VwFULg~%DJllw+7dJAj3{qnP5i3@wr7%y zjXp?Wl2%Th=my&3u?Q$RV6N5tzKMSPTsc#J+-cDDp~qFB6bL2C8AS7Y3PKtVhdhl) zIaLqH5+OnWPWSt(lQCgkN8lczc-V%_iZ{>#1%Z$N*>lu#S;0MZ$T2Y8Kg!U;hAZj> z6S#%$DQ_`Ic%Zr@?}GgjRXg@qTj^17n`65oJ@Wj0u1X8&+UVd|Xs?J+i_^GZ94m6= zUc96~Q`OJvlKB_Lr15*Yw_PUPEr?f?H&00b^-W%26mD)(n(rGGNfK9~2h=C>p-7BZ zFd&*&Msdu{w~(eyFOglwCPH^Rb}O(N7LtS+nnEwDx*pGD?|&9Si~M43a+*L(b0$5A zv`T`(G3xO;I_sx;FwTP21ZlfDpz zOo?}Vlgf~fo{YWm@n_JyD*frOg{XsvBA~|Tn4V6hu>Gd>89-rblfVJUaGvj6X%NZ} z$tFF9sx=4_$*c~G`9iPLGh@=sV+O{D2-t*K@J7H=`V+oVt}8?04WwU3h1BgS!f%1P zFak-T#7`TtLcR=Yz>g0R!ZQrH!YiZOQN=_V-UyncN1Rc18?KY?#O`v#JK+pq0K$~H z3D@v9DZF42R)b9#BBX{^$DOMlJ!g)Gc za{o-1e%F6NvgKq9tC8pV+9S$;9*zNv{J*)n&dmf~anP1)4~N%~h#c(=B#3*KgzhCKhFdgDoWi2IDog{RVyzK|Y`rCUs3T~pJMmdZJy4?b z&s5G=zhf**(t7Y^oC_mcTsE-{^}wiaoUu&?kojLKs>SJPxjcP>{a5CbXCx92AcBE) zHtqP}LjZ{W>PH?Tu(E0X=%{PBMW@F_?#7b&#!^q`<-5$ur+-q6 z{dn=(^UZw6*3-XM_(=@<1_*i&XM4=0t5u!gm6 z{UlmNGPKgO_;e;q9|#esq~Sq`<}%d{+sRmhvsA{5i*91=tub>OZZ%)xUA#4q$dDyy z1`w4%?OPLg3JeZb#cqSMO?*Xn%|-FCcuH2i2fn_{IFusub6;NQdN|7TD1N?%E8*g? 
z$apAt@cEe!I%jB=*q$p_3=t_5R0ph%{qaq+QDg!c99Y!Xa!&oDZOeis_ot)gNXr{l zdY$|So2Qed2Y7KMNBrS^E169kG%h<+z{Z_p_;shB!uY)>yAVcK=&!bg`lVg)4T1|7 z0}7FpfydVH4F87K@c!nEG+WGKm{Ouo)Slpl;#qcEIQ0zdMfLA#;dBxYw;p;KoVv6| z3_D5&7rJdG12CnDSvZUW?$UC6^UVSW^|vw|o-_4bz)(w5(3AiVhpeT(|=f#x_}E?s#qHZF#xA6AF_ujl$G z-jHD%q(d2}v2PhXx&6YWps~m(^+RXl91Q#xRRJBhjKl$FG4bk);|ag;ieUZ&!Ii3$ z(iGz1+0m7#g5>ASldBbNZL=ZHh=tmmJt$!71; zIML2GhEz1pg@1rQN(M^_691wAGkJ@Pga_05WuQ6! zG5RkGY2^`@(H~pp7&Ga+Pwh3L!Njj!-rc;^bTIfo5hP@H##1X8xUZJckrx>id`bAd3QUx9GuomqBYZ!uN1-&o zvTxC?;p8vL67&fW8fw(YOqt>L@bdLrEF*3OgYe$4n4{ zEB40LiU#6-0@5jdN`0w}N0qi@c0~oT2FP z)LNk&a82my?jv(tQpiMi$TK_L@lub#lsM$R{Dk?Ya@%%%huZkct~tSWM714c!45k}-ZLVA-bVM`>|_ZBbW_m-7| z3U%xrAhi}n?T(2F{_n4EZ10inkIFl#y09?7$uwBoJgqY8vylwev)fDOn;>0R!aEnV zBz%j0Mqpx~EZU3q@%+oV7;}|vt7$~ou@faEIq{p?FY$XXg&6*K)b_LP=}gi9`Bij3 zN`zEo|B6*|-;>S`rNa^BKRDbDAk>X#MsR`EvL>6bqU@SaDDs z8>bu@3YdRaWs*Te@G-UHjU%F~kTHw5(0PVJ+pwh#ha2u;DB+UMo@A5UYIl#5rtBV- zGX_hIpw}3C@H*Us(Cc-d#-gNrG#w$(9+S=GxO>3SR`SE2fHZ2KrDc#_C^$jI>Y}#; zMwY=R6@+dWi~0RXw(c@3GZ&%~9K(q&ee0Zw;pwL`E_tZak-#8^_b)Dpyi73^he?xV zXJ08&wh5-M&}qy4f7!D&=E)puDD(Nmg1d_(j`4LvxM5x_huNg-pGG%9rYqO6mImyJ@}*3Y>^3OvcnTG%EV1) zq_Ap?Z!Iw__7#D=pOWnQN$gB!Mr0!9yx|g<4icJh{cFOu3B8}&RiYm+Mb;VEK``LK zL(NcpcTiGieOIssSjr?ob}^``nNf&UcJhXyncO9m{6gD$kqSD`S69(aF8dkWz5>!9 zBLe4Sib7Hs2x_L2Ls6Ish$MGVKrGt5+_2zCyP1byaCg3upo+-I}R4&$m)8 zQ7|jc1Z^VWggpuQj*cP;>Zo9LS!VSzrqmZczaf;u`d0J(f%Z9r%An@s!e>n9%y=n!IZ_tVGu{Jmsbp}Fk%HJIU?a+-~bjfLTuH|JExA8EROowzr zqW9{YyZhR0a4clRK>1I4Ncx&WER~{iE;F^$T7K%X@3PGOA%6#Z%p3TS^&M;Dnjw@i z^o!$9nhcsmcHcY4?4j9+ofL_CWsZ4Hcch(rjsGfGD(nsH>w}^ERqGnz%iGj0j{g}h z7wMkJ-2Z2~eS>2!i}0~B63i;>SyFJU2+>VCS^AxaDOx%g6-t0eM^P<3+*z`ztvOqrG3)&#$K?& z_Y0wbWID47@cU`E1A6A&!`aZk0ZE@z-h#l1NqX2#`$Uev2gepW`rf8*!=rD5&;Jb{ zl08rU>dPo=K%-1Ao1~G-@4ve~y5#9E8x;TE0k5d^TC(=Zc>mwjW^c=+U-<9}b0ku~}gj z3sbW>R2M6DR!g#NUP;nxo>)@7*=RP{U18SDop6b2&PHce^&h97@xx3t+VK+!keE#} z;(Uf&89as9k8{$nkLbuB!-d7TP`_VJpL^Xs8OKB~ri$YUbW8fch64}7|0EWoT(TRj{ 
z*GT<7Y<7DsrCi79ZsM)z#c(!nNOGySOCkY1fAuQOq12&iUVC!a`#O;dBLf=d?&4*B zI~LgAO7E0qxK(uRTM;IgJ}+z^gD+bi-6I!3x{r9`l~%8TRP%UE0V8E*Sz>Nl1NVG<<7(wDHZ+HcOkQm$O&k+vyx)y)x{Pz!U8hS$*m zByc0h6BUI*BOpuL==P+H|Hx%`>7!W+1H!l9vi&)`V zyn2o9{z=lc+VX*!Vh~SF=)L}Z40XeG>LF6cP^b+R$NxSeUqbK^Q*UTalKzP8X%{9@RSCXm_NhF>{=S2 zi}ezam_^P`S!!-cyEW9y7DBbK93roz@Raccy*v}?mKXScU9E_4g;hBU7}zSofAFda zKYEe?{{I54 diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 9355b41557..d4081da476 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.10-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.3-bin.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/gradlew b/gradlew index b740cf1339..f5feea6d6b 100755 --- a/gradlew +++ b/gradlew @@ -15,6 +15,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +# SPDX-License-Identifier: Apache-2.0 +# ############################################################################## # @@ -84,7 +86,8 @@ done # shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) -APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit +APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s +' "$PWD" ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum diff --git a/gradlew.bat b/gradlew.bat index 25da30dbde..9d21a21834 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -13,6 +13,8 @@ @rem See the License for the specific language governing permissions and @rem limitations under the License. 
@rem +@rem SPDX-License-Identifier: Apache-2.0 +@rem @if "%DEBUG%"=="" @echo off @rem ########################################################################## diff --git a/lambda-layer/gradle/wrapper/gradle-wrapper.jar b/lambda-layer/gradle/wrapper/gradle-wrapper.jar index 943f0cbfa754578e88a3dae77fce6e3dea56edbf..c1962a79e29d3e0ab67b14947c167a862655af9b 100644 GIT binary patch delta 13895 zcmZ8|Wmp``)-~=Hdu)0n3Y-8OvyK$p9^s9MM|Aj$miotNhy-{udLczZyd9uWtD)X_{|!LhIEF9y8(e*Z zW>^w$u&x|i9OjL=#6Nl~*ERulzX>8C-}o;iSMRYdfCU5d`~U{V4>HCg0HG4Xg2uP;fn!>S9+>LbuWbc0bETMQfo9~h}yI*TSv;Oikl~t-+xqI-`P$Rj@yi{mr2zC~s1snMT3!OPBdJ%IDnPXq+pl*Z>=+?qo${lkCSKmwTlVjfb3thU6B8yFjr!tphOs*G6 zwL`RyVAUXj4p=9&@PpWK)m+REuvHaq838TEhY^7W+JAp$ zZ^y;8`Z*@VqJ{sFFj?<|7SKS@G`$Yi)gx%nOi@Lr zCv0IJlFz0bP(eDIW(uWNq?;8zEAb+uGgnkLk;y!4XhA6=Eoa<`+|;6mOq>z`%ir@z$4)Mkd3 zF=hFo zyd{*bRQ4YUe^bU*Y`__)Uhu5NIjVJ~a}{lHp-_7wI?#EB11XcqmdY>pk`JJ) zW9Rt!tK=N>fZ!UDomwMnb`0EOvTjcNl=yW@$c!OAg*=l()GjZwSyJ+o^;Zi#I5*uP z$6qeih8&g8E(pNSneK>93A(8*%gvwv!0V|SqGcj55Y7`=N*@pJx_ig3uVuf-G~LJbm`7nxNcZ>Jgqy(LTHu_C2e>STp^Pm{}f&^)XU}vzuU`UV&>e& zqsXNXSs;Wri|?NhCq0vXC5$>9Cag$adyWz^x@NCiy2${9Dc)Y;J8k1Z933W$3$H}g zCQFU1XwzGm_WUheXvnDisH_%BdzMgNwk2^mHcQu*x>U%iN*B^8U(eVz1~(%`kV1Vb z=9T0xmN?bQMyrrd?u}jer}zV&sCK6zSm!zV8A8dP6THF=4*V{_K*E*K<)I(Q^(eV!m!vu##-2g|G z{RB;{gJB_X-w{ANq?ft_n!@=O8_gj6FxW&zO$7L3@NjWt@R{NxMbpHLk6;=2$0P5P=kKc1_85inX z#s$&s0zhV1cz>nRb#|D#N8Z-=Tphm)sGH>9cz3K3I)6XpimJW0(6$GtLzN(YPu9%R zdFXG9|30AZME4r@joC0IdvBBe08mF@+5Dd97p$h=n|pi80Cn2n{ev!S$llPGLqHva zZ3*OmW%!Qj>C$F!Ffafl7#I_1(gz!aa)b{ebU*=yH%^kr=~N?|2&2Df2o9X=2B?U!#R#+Cj45=f@=EcQx+9J z=X3~A=zbX29Fqn23m3dm}0Voj^Q9BjI=MiG+NZ)YCYn@r^qv(xE3=)&i z=(ML301=rNTptvUt2tnsPb1~G*DWFWoZfv)wV|uNW%?!)jju`jN(K-0$JYi!ofNup z9K%_ucHwutbZsl~vDQ!Jtj8uI6WA6K--@?8+_=t>g|kgUeC=w`IP9m&*fuoO3#A;t z&3@=3;J0>yjM89?h5MG$S`wW+=vyYOWQGhIP`^vScM8^JL{mGan5uTJPvAg$0z}8; z zhMi+S${H#^wF;eU-0UHJDo$QwXDjm{ns>^ltubKXtd>6Bq-=ByF%bHu>2&e&uZj2X zgWIq(l^;Ab7#I@h%#j1AtBIkB`GO*y!i;1K+_SZ-p}4jmP7#%E-=>{ 
zK(3*ObyAgDLnbBLObTWJWNO7<60SK6*!dD~_7JOWTB*}(*X)ox0{lq5ac$ABkcL~0 z9qCHT8^`QIe_4-BW&mIe*&0VT6w|oJ9hnOO&oZUe!rP+gStQ)h5ZPhBprHZI;So+g5}&;adp<|7#r@DG!wXmtwdwy=7i>a`x1D4 z_N$0`Q)>zTVUU%@RzlG=4Nk1hE=_klWj|6aj`KJ@S`y^%bifkdX`s!A#|mpM-x;SF zg;bju5cA0?a}%hk=3AL^#2B>5X(TSne6PDWY5gRVvn6nKl;vg?SIbv^Uz=+4aPUft z-$}QR)+_U?eX*p)V0%#0@S46_6c($OJL^bPj0Ij}up8}In#GQa&Cp<#%ZPjx(^97{ z8AfEgrNRTg-l9WJrNJzHx1EkI<|n(P3VIwFlTvMxfe=V&NL)4MubdHqZF)&Eq4`+% z7z;>s(sjUsebUfFF;~)_%@3BDl8i085o$H!*yBv%Z27d~)|jfg4DhJ&nMb((B#4hOfeBhL)g+r)f%2be?s2ox zT3j0k+Va^9`gqO)FoUV@F|((*vGxN>?5IlvC!BzW-8cyCy_)Fl8W+eg<&Lz^s>dJx zkly@2Xzzi9Uf%|1pF_Nz-3SgOx*+ShK(x=XUlP?;EfoDqAkkwyR*yjIcD#7-@=|Um z{T+V}q`6)wnSO#*N#Hp8QT7^>6R+H^_o4LBc}$aD^@(1!+Y54YF3@A|Cupsfz@Wt8 z!KwmSb9}3l)u^Y+V6W6(bL3hk;XTY4FNy3hKhID#Ep#xLM88?`xT=lw3xsgN;gKK@ zqpElV*j#e;{w`OPYcb1_szKUtRLygjq2ldhGJ$8ksyH(hF%^w`&FH|zlDK`DfuZ_g zs}!{hMk^~48&b=jWqG2*^m8?ERreHIw8dgR`Ugj*t4Uo`^U*56MmU<^ zNxcuRh+Kc2>W~lzD8S6}Xho3s9f}{o4@tIc)G;lKXi(HJhZV{qSH1-xj>P2$NHEK2 z)TjOy%>(9Ot_zPO)^tp@AsSNd+`R?}_2Vd>=eT{G&TfITkeW@p{F+FTJf(n87##z& z!%w+6-!NJ*?9Z(hbZv^BG$Y1`BOo~*k7jaZ)9%@;H6F+W!Q%IV4qSM85; z0%xWZi_wc=CCc>2rd3Rk3C79_rJH1uG?yFIm4f6Fdmts<41T*;3ek&p z3(NaDK3iIDa)MaUD{_;~fMV6obrT6_K$c+eeRBJ7jd)c%0jldoJX`EWz8M$b1s|DS z)cr6)em!+P%GjM6uQb6CQ!FvUb%_>qbKn=gHl=@K-Z*6_VaD=;!?P9pr$Z?6NrB%a zb_G4M-UkkhI>H@+kP;eS4p->q_f+&(R^7hyRsS9Xl94vA^AYlM%tdNdHQz zFQu?Rau!C@&&Dn;i5iEhn3`y>{O-m^_*h+Jp6C?D+5yn9Vq5XVQoUe#BP3}lqvHa} z@x~UctaNE9PwnRg6+15NJ5k(PC0dETm#QxXY6&uTqupm)GVrsvKC9o)&*mLo9?$Ot z!SFjh+!mr{kYE5A#urFIBv?<(6-HtqfprK#3H4dylz5j`Uc)Hz@1}A9OXe=4gf3_- z$P|^SpeQ89xlL`pftC^4tO3N)JXTqmkbruGAsraU5Y$fyMd~L3r3t8-SfkX{n4<`@ zhBKAeBP_1Rd8q`<3^dio2W9^9iYW?#m-!IKDO7ge{vC%1Y>dWLslyLNrm-!*YU3Dy ze|qm9gwdCJKZlwcvaoV%S_%X-k_?QIf2zuAG&32WtJ6NDr0i+<{w;CG_St&I_7HtR zTiR;!)_1iw&#FKwAGFuBze6(_%DLu?>|K(H5bf{br_f5|#qa zNOuJQhSU1PGQ+dltC{ik3sA?PcKcDJg;_^-LCcLGo+|3VsWx0vMNOpKz3*U1wGG0{Z@O=3gt1Ay|67ZJC zGe%Q2bP}rYtE^Lc+ybPES@Snxwlh7Ydq$c{H?d&8e>!Dvt=dFxeS0fvt=u3$KHuU; 
zKHr9fCbGGQBeJ~@{wdgJi6Ah40fcT>yGRWEe)%=j!AaG~XDaHNdzsU6*ZJ2XC5>lv z=IT$K4yEi0xt7i<^=rn-$1nOKKRQZ$7df4uU#`?ddlH+Oo~+H_Zq!-}6VK;|?PGiI zhbt$ffNJ|--Bn6(L{pZ#!&ykjgBXEs%hmxg3vB~;GMKcAfeq~#2~f9vw7{>?pTu{T zcxLiHNCP}pJ_fYl3^gBy_}h~U`lx1^?)q|U1cti6s?Nt*RvSgF6WD8U%3uk zwC7lEPg``Bjt5YXNFE!^nq zJC-z}n^zNvd{jVhiv9aKNd}lH0$n97EBjb`Fh+7~amqAtrK{@Sn3QZO3BBiUIo^n$ zsiS{+L+8B0e&`mFnEqM!LCLnzlclx?UwZ(L6!FZ$b53#xA2caP^zn&!GVtipn{W`U zvN9yG-?@6)3`HYt>E;wO*N_UGd``TDMJ+e<*WUe$SGeaBU)dJHbvUp$J?}caKfP>U znZQtJY@$~+#6FOn9R6m86Sq3iiaaWa3kiz1k>ntIk2*6R+6gchFxKLcBi9EMRVQrl zP~vO=WAFX7o6BB76*mwH?R^-5HX?KAu`a^Eplkmc zSXpmBvQ4t(kVfyQIR#|Wi7PYcy+x;(5j|LOp3()IiR>2j9**}<*nO2NiED?Z;)iGh&PH4nB*kN{VVt!lYX*(jAlnZkabB{Fa7)iF?pBFk(T+)xyg(Y5TUd;DX&MX&_}`_=Z_KcQ9;Ok=&YEqPyVul9sRG%P!*byO8nRS# zGwOm?IyLaeqMf=7AGF{L7v%GKmeM+;#U;vPs0=0R1WAo2JIq8N`PGDe}Q zt6VP!Fqln^U#5ZJFp?b?d*Q}Ynd3Q)jTU;{RwiqDncXA=DXTWhkWhiR{XF9aobJH{ zEYYt-`Hdwp@ZQ5$_i&f`=DA1D>lgJ>_PkLE6#)L#3R1Giq@XA zCLtGAgOI35<3Y-&55pCx#&@_R?w|x@%3$Q-X|@=Zhuo`C@cOG0@M*&sW@uXQJz-M; z=ZcUIw+bXwCV+k?WF;Ugyrm6gy8KjZmaobl;Omt^`!m*(!@&}j)uCT=+}RbLo7WiC zM*7VJG5hnkugII&>R-Jyx<}$pNBtEizA`Gn{GbTy^WPi*o!^5_gH8ME&+{<}nBbSA*p<6A z{c--0SNgk{iH@g2s&K3L#wl5fR-H5$YrMAEA$gwfPC&GdtAb=bUk$?Md6^mdF&^vj z+iAp=tz8ZK>*?)QgEVBG?CnAb`($wf9*1w->8@)hg(hpH^%IFjGqTs7<*jz0J-*C! 
zs)=j2cA@=KgS0+*LX^Qe*))69yFm;(i`r6`?_p2Dfi!AQh43;ix#Kv8_*W|IsGg;f zJ=0%L||IPz~u^1P?ZkuO7VD7>GEfT=K*2JP!?hLF1f0rSkXpoIojW`}iLv zt$qt5Kd$Ty5UwS~N|w!IW4-TDG6g9!ecEoE+JUM(=T{d4yASY8>tlDG_XdEUinvXN zl>XB_*;iM^53IG90-1uxg#z{ov9M-y`(|4~g#J?dVQ&7tJ+a=N9npjr(_lb@G$v24 zPeA4UfgSFXLSe$Ghn!^hh)2|+YuV|~a}U+Y9iy?b*TKn*`y{ADmlq%d|HzJn0mW<0 z5McIquX})(09`s?@%4OLy)I^TdiKP=%}XfT`s{oX5eauP0FS#ZH3$bT&E#E)1%_v48Kc&JbnK@KR+fCJ+WWg`;cXecj9ij8zP$MV%S9InmL z#D$p6%KIKx&U;|#5fPg~KlH~fC7Sh-(Ut}5+tSSriumK>DDF&sl2pa_A|~tu_*8aY z(*Ud4=(+k5;ke&7V(y`$@j|FGqk0(WA5Wc(N${j@=7U}Xs^XNgK(<|>qug3-b1T3( z0=#Hgj}+TLlDhVm<>&!j$jvWXm6SLkMW&2k+;_u9Tq#<8uKtToJ3Q^==VQ0eV{+r6 zQn5p9xfHk@%P_FbqYM3DFnxUSXF^sk#Ms{)T4quYP`fK;T+Tj&gRl6sm|74UbHHrF z7h!QzEST^cpRO6L8_~zXNp!niGl&79$k_8RSj0W{xMrR)D4`>~tNrK~*s0gkO-PC^ zu^*~aOBQF>qG>`%KGd+7W{nGqd5lc0%E_*&rn?MObfYvgPvJ%vawv{il#Km=$-hF* z1V^<{OA_t~X|u>{5ljynGhf844dJ#q31&xuibhPgP;6z{C2qw67U617_1*$=(_{mu z@T$|cK0GIz9sS4`1VcT=#Rqfsfiwbly-A61ih$VWK@T{K(t%VCA4=VJ4(eT` zLP`DnbAKO!X02C>qoh6kk2SEE|nQ8^J~0S)XyHMI1`BA+8Q-{{y-|Sc=j6N9xVnV z3^giq-U}tR!`_$ty{geQQ}xVo!CwzlXx}-}k2&VU3u7n@(1G0xP$36j1GKVJtLydS zm|^pz&9wE!Q>OWGMLY+Y?=$lIM$IKdF`8Pw)uhzhmFGtIyWl(qh0C@9BbzwDR>rEa z2gc62w3u1cW+De8tCw(3SQ8EK+t9l|ef|)GLRlRJz>SleVh^o zSq>XS(iJr>IQL-5^9LMn-MBxnO*FN{K2{7JVUpW5nZ{sz&_Z(dXDW?G7lmn%1nU|B zqC_R`=83Y=g^uel37AnfplTx)W_%O1pY@^^#~MgJg`0^G07b7RHOA>7K6Vzom_M3= zbD)3(BXXoqR6UFGHM9a3uK)SxX-0%jvKG23)#s6{vbq>#o$1tZMI5hU1c`fGME7#Ij+u%*rdsnO7yaltUc zz)OZMW*a=_Q|k2CFQ+lR%Md1Kd~``A8LX7vMtOupY7HV^E*;7o5$|Yq;EZjl%s-BLWa)nM| zOY1bfH5&%ed5t0h_`z*>GNiXhoMBw9+W7 z4U!O;)Tz3n;x64wHcYoivoslIkj9IN05|H7X~GWEx-k619Z-KjWv%8@$1wbIvAFfI z0=AQoH{3yl1z|`pSg$(!>x0)nU|wT@4i`lCchm_nrU@Y;XR$D^5wA!Ftl}*9OwXFZ zai&Zh_YNnlz=LEccY_eUXOEY1;q&Pd;dLtf$RffP4%P#4ZyIjV&0;_13^ zIVGMUzx+5jLyq55_Qz0jPBx~-{DfuUW)hKduk1gv0et-e(ZN8;IIdhtV$3N9Bg((Q zw5eHG)FFs=ewUwfdHfvHb$&&i=h{#epIdWr+=YE9)%453DlIOHLFX;%dv2LDNMrMZ zEWU|CvEYY*(2SE$Y{jAd$QU-wd*Hbe5yO+Lu6Ux|(Y>L}E_jNPR+TX@Ch(#orbP8g 
zv+Z(oKz1gylHHGKB*FbdpSh7VBM2KVmx2oj>?q8|s72`}5s)jT=s4;lbRw$cKh+N{ zVTxW`s~QW~rRB;e|7pxFoJ_Vm^eVjcddUh0Xp(NhCBZ@Uya;(x_wkvyH*^ds{2_H? zs*PV?33(>MyJC_<)JC=|9II5@I`QnNGgZr z5AfQVuy5}nzXlGQGV~eESn9UcL_U$gw(QjDVEW4b-o=BQGBT*a$1Fk+4bm2n^6m6w z_hn7X46IDL7iQZ8s+_(8yX!fXqM9htq_Ts}08b%snTZMmP}{6(anfizqhpR1cR61k z=sfzRN*!0HP{Z76PDg%PUY)rjwhuy71^5D3f^bR;(fQe>3U#zrWwe0OSYjHZ-eSJV zuKnE7`~*u%-HShx%*b9ZPU~(Rg=`lQI$;iBY#2k^6{Ef6e9D&EK^irorXEpE!h=>^ zVxH#pyrndMgk)Ff-ke*RFsPY@B3AM_;Kj`PIJU@EH^QsIUo1wdl_wfqd48O^9?06@ zt*>img{+gG%WiGU+&V)`jeJUPSDDLhd#nVrUr~dURh(&O#gMnA0dEg-#?fg0Wnp#P z;4QjL{Fv?Unq!!)POdN%ZI&vU*Ww};bqd3@5fb_<7mIa_w@U?X&ed5f1FCQ@57aR@ z)TUphLPht{?j%;+T}Sfla?uiG26R^?7=x!#CUXw+$_TQx_%vLhgg8LVJz@{QVxH;M zGcV^6&Z%`yWalhb>$VS`{^Ex`w@cldtZ8t!!exC zu+Msuk)M-ylAjAz8{yA&TjgR`O%H1H0T&$<*+K{2-<~=1E0~C+w@CzUg>GyIegmx$ z$vp-I6CygcS8Jm9rR{Wt@W?<)IdIk##3DUE741Dg@lQ~Lskm-7=|2%)&XCF_8|780 z9d-AgO*4e1uf}M3*FGo&%&eG;OB^Vm_x8i73V3P?d^qdJMvO&{H(jgc?n6UYZ>-FU zeO%|qJ%xvB;o+$e+CHm+Ot1UgzOrX7_G!pZrt%?TaOs9ZPg>i>-gg^Vuu6p>LEd99 zGlCZbE5(oNfEP{~x>KfOZv6XWA8zfk0@R+{;r7WV?(wWFRaGkg&mR3j$wJa7CBWz= znwfnWiE^@dC=n6jrAY4vvH*;b5{E#wK8AoUW`vT3W+8gyt9<*hPl1ID>F3bkLniI?`*u@J2zcd_cAH2?L5O|qzu1jQs$J^g9=beD zYoEgyA^AIv!P%D3;3T_C#zm7j6=+ACjtf5->)lXATb2p>g%qD7L1EbTMh(z$4oMY) zSZft;+pfN?a7x#%4}(P3Q)Gvt1F^8eu9}_PDW&}_2hhqjF#&SGUnz^`=V(U{;B;`G zt7FmRinElmq%KVXaBZL$+hD> zLe`*wO^B_i5W9q8#>l8J4;5{XbZg#@Z9|D|{gN8}jF1XBNzpi*9R3+-F)w8EbJ~In zEdim4jC?)`IzcZ1_`5oBWd#yPJNc%ajkte>^q1KY$#LzK)`jz_7$%1`N1_tdhr^wG zp92GvW>iDG)!1`I3*Y3;C)Jz7**nV;DaO_d19A_8qX%OCf-KY-GEZ#Nv;2CZQ*ht5 zY`vXc7yAb|?h#Z_dEKDC)Wp}g7hJDlI>P+ctKoq`U4!4az+ECGUSGmfHRpW&m_%7? 
z(o7gajY+w(Le-L(_Al|yQIvl1gk&lX-5BMZn=+~n-N}$`J#2x5x&B1EG{drVp+i;- zucW)%=6bqw%wNB|=k!-_k($v{gQB1ZX`dn0tu@(Z7b0$g5k88nHYIEE zT{wBh?|8X1yS1ITl!hS_>>{cobd%i3<#)=amBnHn>p;m6f%!T!BSP{_9DL_Wmv{PtyL9hoTep$i_uAr>^@7u^a($-HJh2k0xNsYVmt|v+kCWusAE%8~f zgZeq1{C!DL z7|_)gsX-J$DBwOYs|TpK6>I&l2*#dm_B%7y(JCJ?jaOVZJg!;eleEd~bT^pJkrk>q zB4)r!XRL!mow*tX6z6JA){(LgKapsISwxE@P|Hy&;*5I17ktf2EQSu$>0G&bDc^|D zoB?VpoqIQzg72DO!zOL#jXEsFWVZoyX*Q+>cyNC5+bi$(-R z2PXnAH)~j-X7q#KV*r7K0Tj#Pt=_Ix!xQizqfxG}vfg*swPul)E%ElLW)2B0BOb4U z$5{w|1BT44k;f7uS&T@0UH_mBvgr?Q_m;tun8!5sqbDu3_a@H76e`xzggnje$~Vo7 za$jN9vO%&+?c(NFBWd(HH(c*Tf3txzhrnp4X1859WXnbk!aVPy#xl`hJYOb;9$6q{ zkbx6NHJ;r$;+CoL5@BT|)P$#Nd4mLhJ?! z#V8L2#1$FDnc_k5#=YeMy9&SHkG_wJOT1g%-w$u1eta|QD44f{Y&WqiWW218tS?qy z$ZDkAwNCgrzLY?-u2WO8%SB`AO_vLdwg{s)2>YT(Vp}$u)h6yDPl(o)wFGQ6GTv9!92`>rC_Xgn9)BKfMk>B0lFK$_ux zk^my^G@g^?|Ds?LnEwzyJ7qzahke+uzE$SE-IhBwTL zCnKg33>Lk_tsV;Q?3Nd07IG)>PA43Q@@bD_XViZuJnF+-SR9eSm-b^YbLCU7PG6GQ zJKkO|*b;^O^%Ehg6e-0+bze&Un{k(1?Aom@b7Sm z?b{}WJ!Zfj23oRMKPiLEh^qy6lZ(sff1?M#aP;~C;P0@AuUam$iHH$i(Zc-_8++)) zGiB*fRHaTE_*K_lAl+<$IklN{WiruTjZ?Ir>rocinb-6%~rZb)Z@l>WsZ%cVnF`u(k z3MC-R0(^u8vlUE{9TX~VYef_B+y~v-T`n!_ zJXHL4N_pJy{bQGCGEJ2vO`^5M=(MU>=QoaiN4n$ZmlEhRRC09~b|CV#QExkR{!cxv z-Ih(Yq);JB({7Iv5SqD14A&CD>{9d#mQfp_-1nX*824hiHi&jI!rbzk3^mafyBi2I zXwJzh@J~^n^Qq+Rev`}V%T)Pds`2QDUxGv4pkJOaJP+l=87o}7L-RV1V*p70%Q?kQJ!b+v(*=vXQsHF z#w&NkJNb4_Kvu6hrx0e1Q_pLru87EM%Rez`mTlk~vCAr;IKZqQ$#>gK{ZQNJ$F@r9 z17m<_yD6oKG?O@e`O;WsIhdWwE)Z7*SyABxHvKJ!x|y(wVq*Eg`D2Q%Q#&zSm8c_X zY`zJhB88q%6!2%9%}+RQMhWH=sbw#8{a(embAwu zeRHhkOtBY=U&ubKu7vS#2DPzJ+WbaUn%Eu`p1cjDEU*&qFGKE(o%RZ13w1x?o_-#{ zj3y3uOaJI8nlJ`Rt11>dUer4~gzlg1qwk_n+`w_Q&I230F}#e<84l6$Ub}ga5BLCy z$uT-aXsHnb5x(Q2(qiSxMHMrLS5E#p#t6L)COeA@Vy#t82W3I7zxNN*jGG$^^A3V~ zTr=^dD(liTi!S&uFU(~grGKHPJ3#7Wm91!jh!*X-6-6}Q?cA`2ld(6Q{A_nw+16`p zBq**{Pk_!LEyI8)FurdbBN-IqyhFR52Y9f)rE-#p}V=M?A%c$M#J3kjR;+GEA#vBv7ig$61YKjN2FsuXxl6YE;g-oLfc3d7ixb z(~0wjUXzRlz7@}MhgnS+FRey=b`F|l<3w;qodOa{(-yU^k{7Owq0>0sq7~my3O9?# 
z;MqUiGm}Q%_f`tMUWXlWG>uF0_?>-d_6ru!DNoiMD&X~fg!7a0H9Z%=3kwQs-Q1{g zxIsDbEXG9ly4o5M4LODy_vvf8k1Dey9QW4T^up55&l zkpg05cG;FhOyo7R#xy!3{&xPzXTpzSZpRkB&$uR(?99to5LDHD?ak+~^R*OGg2wFv zUjX`1J0_eHXV^8UJXLSFxSNPlDSRKCJ@A^Jrtp08!98KQXBT1L%avWTv-8l?va+Jq zHqd)|JwByFcmK%afGyJ=rb@ELtB7tehaH#)iRz5v6?C;mDxZj)`upc|y>)S)VveGb zj?RG?$-D;ms{Mi9UTajprUthRTIksl=OfjZ8iD{zhh{YOLQV$~PKQE~HHn!A-`+on zR*Vi4Qpbff5whUZ9dr@0UMy^6)_zH48Tiz-RM+T2vk9}rr*_Wy-CfoxGjcedo-{zF zI=^!G@*UT_@;VTiU+I>Ht{NTo^Dj&T`?{QK>&9s}PXt=TxQbmKUDW->h6Eh)@|}uY zfxqy8(^9cw%+k#m9NNz`x+UB*DrrBVuFm%-eo5kp!74OI^qtOcOgmD z8KADRYxrHr>DeRsuJG&}MumPmOimcRYf)HcNZ@n+9Z>VwI;H|{kuzD-~H{S8;hQ?c2 zjtv0GZ}PmMOMCz*ca!f8t!=)0eIWsWjJ71-P|23{TZz8yg7Kf_uYY%rfKs-#-mI6~ zWDtv=K%3NLAnu*Falh$e$sp$0L0w!lpwgZ9QTM+QD_m~`Hwd`>zEy>8mki>B7c|Ao z1M1j$C*t3TL;k-)g!W*N|5no|$$~>*LSlkyga9DKJp_ntp?@6S+sqXOyh(8W{uKnw zfCBb--`KW2G6-skzsABWLHJMO%+dg)|G1h+znMw@zb^du$snNhKu5aNu>aTVhA9Aa zypI5ZZuUl#f&d5a@?81@G6)V!kn(}ZTjkqZ1;HA0Zp8~i*?9jK@7DzF5Cwb{M0EJJ zdFQYCg$>j{ouh%B3M1Qs3=ZGV(U(Iq2#NQ~M^NV>2IYUw?*FKE|8LZ9$ASPj2hfxc z)|-fz^uOHyRf8gcfie7#JF3$^?wBCp5zhlK2f^T{`>T=fi_P#-dNmI zGKjp)zxq`<#rm&d{*P?xe});I^_TmbiV9SEit=9}|1ST-{Qv(9yx`vu!D0;he=gX+ z0@?prp8cP``iuSvME>_G8=t*R-p;@1^t1OXT=hnT^!!D1c2WH6hj~s0Vcqu+jSSK~ ze?K{$!~Z?8YDWJup9~X#I?msx!{h`2w0@2N(KYpMNVp(=<47*ZAV}x_uET;%E(l>n J*WbtZ{{Z#P!zlm& delta 13442 zcmY*=Wk4Lu)-CStgS)!~5AMM=xVr|o3>Mr6cNpB=LvVsyAXspBcgQ1o=R5aaest|x zYp|Ud;3g1aLn46!*8mAJI&Z-nf(`=#0paw?iVYg# zKUs^o|DOcGK$5&gPV0aMK}b!cw=e}1HdMgiC8Pg8*>1^32Z5FfsER!G3mZ%qKjJOpfesiQ2!1wa9roW6I&DK_t$shg|m=c2cE{QdM|NtSH0rXoXzvmNP+5U2LV{^QbB?sv0VKm95!eQeL4~+?=ho^^MZI zi4QY0fsKBbqrOh39Z!#mM!z2}i6F-BHKbV_Q&qzRsaF`l1Vjpm1sC-ZseEjRhHlco zfXoyCv0NC5K}!1s)zB(Gd8sKQIBYyB)bFK(2G2GM&K4S`>_HR&4tr1?iRab0FsEbp z*Jv*zm^-fRK+ctLcyDjn-afw<1S1jM(4q5ykfHQzL_}qIFL}{AIQ>4(4ufTO5LOPw z_jW{#M|)nyUycekv0yq3ALu*Gjx4MO>bHe*!#3>nE^vCCDgcN>sA^k$Zux742g7MRGS5YWh9J!2T zS<0JF@`%w;58G&U(_V6*RvcGc?)SP#I!b=^l;;8|2L56hb1X6;bd2imS_1e~0c%T; z1T8HGf8HR3ELFmM^n?Su6+Q7D+$t^=tIK-pWi`W;i!lHwI+jG7m{1RRjBU0~dzp 
zhN*kX9bAON4=>l-DWvYo*J$Q4Xp~|yYTaabShU@ns@lubZE3xU%6MYv&e|3AuK8?k zu?#J5JQ%%TJ7Bb$Gs;&*)*UAk%Oo-5q=+2(Jm zIuppiu)ZJ9p`Q{Ox6P5{rbDkZk#-Qv`%KHjq9XiNOUl8kb7aZj*E~>vv^dbHH4oOd zczWr1LJT!^o_(O*2>j}6lOtE3Z)Pht?L5pyzPpntJ|r!%j z5uggS6oZWkpVt^698p3fEKA&|+deWq)ldqZGKG?a|~=1V2xdW$8-mayFlC zJWmagu;BBJC#|ZHrUXfE&`4P20AGgWC5=H0HjYm~^E~OwgAnMps?;#CY=ahb7%?H$ ziejQ`%0Proz9+myGwpEQf^)-=KkUK?uyDVM9dcP_xwRPl?asXN_w$2*H zua=Dr(GFqiFLl870&u+1P>>n@QI(3gk(rj0%e8Ar$G7fdFyGel0{sZrPuEX12l`k< z5>lA+*xaiLY{Vo_72dq>E!s&D_ z0I)&YzOCXkxi;^DvcHbfU{x!;>3?+f!px_0&rPIW~iPmIG@n7rmiC;XiLC?f3vTJUz`Gg=p9 zK8)mv-V6dl|9;(R_$VaJ&lBtE0aw!=g-iJ(;|-J>nsF(42in0{Gp)Wy}WNr3llis^vYk0y2t{zC9G7SQW8GEvz>ZPi09E9wH*yE=+9`RdARy$??) z&b{^h_aIn=A*FNBQ7ATjvh&tjsQ~1FV3r;lW1~f8kh24Aagu#Jxb89ZAs>t(Qw(FD zS|S=1m#oMS;Dwi>0@KkG0*-OHaJb4?~;#3j^WrKgCx}3YozM}uF#0{&QFMled>Mo$+hUe%lY}nvK|5GwA1fTy@ z(^KJxKj6OT*`H=XLgP=vBF+Dn0wO;EGz7>+V7(zo`X~r*4Zb>n+<&CFW^ zx;O-Yo^0{nqPJTC5S<;>8>L{^1C9Ql@|#RETigaBa*_pJOL-@W8p+w%^}Gv*)l3j& zWma|3USri z5Z(cKy3rMvzZlR?nR7E6wO%( zDf&3(AqN7_lQ~96t?KD<`i5K_pH$aIxYeiWm}ICd!1&&$NJHxywzKXt0v0W~ZuFwG z5rq7KRa$-&A|tYU(+b&T6VxMx2Qmg$O$VM!XY^ciTE+)P^vMMLl^U-ySP1P83$*2u zNcQ@)+ok4pN7x{9Z?XBZPr*Vr7wr91_FvBH=xc%RZ4TH$W+0R#VWB0Ua`8O;-2Pnqo5QG!{#(=RmvtM({fuA>4ai&IW$2`P<|D!v-qs^RSsZ z2+y{qc6(Io-Ywwf<$c?(7ay7Q&wZ)JAdk<#iTYCy`PaXy(4aeKd-6d}u}-UT9jad< zPB+QbuZWqQGTG)@?W;;TDUqxD9Q+ao``pz(B`&cPTFR3|P6fz8&WRjU<4 zKLyJI>Cm{uI!saN=y6~Pp0Yiw`YLo6*z$^aOS8b)G@I&C3g&BsS$8cSG8QK(iy>kZ`195!*f-ndgPIM}p9?J=GYwFDqRYmdSymmgW9=>uiSN z{#DAsx#ke6UQ;6!o#~HR_BN1VnmUn=c$;LY0ajlu+#0J~E8a8UlvxiJ7^)K-FrJE% z<2gebNA1Z==jc$B(7~TXXM6&Q)3pToSPkWWSOl$HC)oA zgNe5(5xkR+BQco*Qiy6ns0vv|LP>(bx@_3vrzwIU;zwexl)cvpL>(yu=LHEOokp5L zRA9~H_ysBBuJrkjur_&)92IMj*o{ClU=^%$`6*Q~>ISJTt7*aljn)-ljW+BK3w>s| zLN#{_x{$hhj7jvX2)Uy)P$0MUVAnPRgU&7jijQ%_?AODC$j+(yrkEJnuiw`IZ7!R2 zPB4GAo_x+e`MWBlrj}-+i-p zjlo(;u36|+c@du3o(ChHTb!CNG1uvA!k!ACwEt{gFz)!#yl79^=yNgIS(ucgbSZVj zR+{Nqx!hUAVk>-}*j$=WTI$Wgh61lQum5C;c&WKWY;gwydc@?bv+*)FqXm13fAnj~ z7*E%gV-~u|mTx|mAw-ZO`Bi*+jS3ZWr4V0~ 
zh0jG$(j(1RVT&D>u$wVNqIc}P&MlcPYg z_5|^fraxyhG$cMGT+&0SEe)_*oGW>KQZ~0~Rq(Ly?T1~r;_P(>cUwlKd0k}|K>BjD zPqf(ox&pVUNt_0FAu<5Ry?hfTydm-bPTF3CYZH!1pu(4}QAR&!8!uXdc*_CBC>{%1 zA#ZnKhO=T2`m_g!lt@+#fsRc8DFky1Glal5Y`)UPr+ffyzIo=U{^j>S8)Iva%|F%A zGycyWb;bAUPc@wa68+gwA19vu!9Z~EZ_QRl-&-LDp`8Ih-Pu$4|EZ)baFvDzZ+qHA zEC>in&_*!{DEABjn62&YhoepMyX%-^)Evr&KA*^%h@n}5{G)gq78)|*fHeX)qcQ9U*FEo?pAZ2&Lq&Gb-n;6#E_Xu)r30J;4{Oxf#|W(TISTm37EaLAz)5( zb1#?ZZ;q%NG(z8!JPil?M!oqa`W!eDy}m>{b|!``@2#VCMt(D7+2Uyh$(<&;@EQ{J z9;IF1P;>@bd{rIHJhxo+R-ifU(Mvyf==AfYG4+z6+4Q1Ar=nOHUA`Ok!e3Kj@w~@yTV|fh zG~45!>b!@cwCpXeD#8WQ?o1;`s8Gotuz$`fbvPoAP1e|d71`QPX&ZV+oBm-u;`HE@ zym&N?*)l!sMsiRqUCH=ki3ME&qFxMUJEEzrkRkAmSMOkwUCrLg(Ig%_Sr!ztKfZ&I&V|;hkBz1&x)60kft|N;0kXv~YbhB+EPM4N&!QS#}gP3tLBgQpm6pCr<>GQPu|KzFkk@ zOl|mn?>(D2)rZDbhsv1rnmK?{HP{lsAt^U^B+7vBxyOSavbz-KuGLmVO-nU=o z6S)#sswKHb>egmHw;{EM^SRV1M`pAk%gw4o7vPVDDKws)dfEG=5Opk4ayvRjWd%MK zXYcoEj?$jD=(Zg5!X+}wY2~0gxnC&q#zc-9wV0VW_PZP2tztcR_L@_n9AKCBu2fRHnbjeyv<*yJx~og`}k@A0HvO@R|K|$hBMLQ=WrVx>{$Ar3jVpsHmuC z$t3qeB>3$4EYSl>!zj&+H1r&FyDogkkYpysdb~}}mQ$u9=gVLTQ=Ns$4fWH&Gy=E_ z%CR%}(Hu1zm@)A~It;A3Re$W4q#uP;pyBCK6ta|7RTit)0mWh==&(r2UnTNDxk6om zmC>MJQS((G-uhP&ZPN^6Ry(Rrvz$XAhg$K8((*`87J)?Ujsv1THp9U~zMz*LJ2W|s(*ZTJ+2yv_eH*%dgVNuT(K!EpdvA^glL-!ujzY3Y z`KD{RAk{+dBc8b1NkgVVuh7c{#ta>ikwf9R&>BXBG@;6@!IJ8s!{^!TOSnoiXhJKq z?$^tc4t>w-N4X8((semr5<}q8VoD}!Pl|ZIk^JZ=leGyf(d(I2BU2>tl34u@7+jql z4N!&y&O_{Zbr!2bT8oPEH#c3eTM8Y6ab=2t-SM_`QpwW~PL!U-RtbW$9TA_Y9`}KQ zIm#;}*G*)&@z!0tS3P?A^WhYQLr zSy4ZZ5rI9~P9E!9?O~2mtyH;!ESE4k4@kzyhIRzCqRn~`#JT5k1Y*8$8zo4k?H~CF z=kwf&U*-m^wM5Lnx-bI|b%lcR0g5_8HsTc`$CD9QTdkZjx~{mG+?Fmpm=>yMB=5rp z!d|Ru`@?G2Kpu)ttD7#&4(`giOjCpi@DuC0ftdE2HAgVQY!X#HSTvYwSZIlvIXwJQ z8|!>2H#uIGlyv;@QWAKhAIV;3HzHTWzLYdyz@Rn3$xF(}6y`f2O2*-W=5m1`Ts3JXDuiYr z6d`uOh7w_AtN~-(cK;qFotu@Cr2}!C4)Mmfbmo~F$bUPd9bZU7p8bTd6>_dmBH53< z4^|H}aUq*qgxnNnJ?$CS$bK(GbLfnWmY8&GM)SB4&z#XOi3IpYi84+{|@ngymx$~Rj(n;X6$p3B%0|6q}h`vw| 
z5P-LTue1EUBRM<61|}yNC}WG^gs$1N7_|QquUfm;ERxkj(nHF?7$A@fr^X(L0Yd+JlyIbivAQ_WnVN+;*y|^d-o0gj@Sj0@Ll9H0=1@hE$Hta zR2PzZH0j!kKBea;ePh?Jrz9Ko7nOq28iGI}i($3?7&Jc!m;GLB*io;%#<2JUVUyNS z!x!dd5#uN<(@nza%(Q+QY+5y16l%qlK@t)s6jyvV^GzU}5{h^k#n=pC00#k<0GqHun4N7jH*p5NKxwY-`-poyrq98zAIn(Pqelhp@wBZS z;VPUpIZzh2>BSRb$Z?b~p?EPDjb#@KnB}){l5^=Naz&X^lrUaq`pipVbPx&kM1xpN z6F(xQqnZQL23bVMsk6$`?ca%u_*|N#<8zPrmThWVf6KSa&6A2d5O?dgv*@;Cgjp*B zq9km)rsQ-BmlK{>#^X~h*KOtJG(cw&oGPG2kQwhrr;VYA)J|^_Tgrrk@v%jYPrQtt zNfNI58EA5j9B%W{vgy!n`D;ueZJM60hba*peuxnK?;^EQuvlBbfq($AfL4p?fFBY4 zH0I_+=o&hQ&ljK|L&sGS&1sHDVe%tu)bbFl9j zT><}db*{&yjtx=~fNtE&hISi_2$bbgHKcne3!$?U8jyO9f`8uLE93M`HT*Vz6ZRT1~`1F?D!-$WNc;<&((Ib08Ag&yg|t zgjctZts}}?Z4*NkMIsVgJ|ZmJJcPXWHXI8k&Q;t;h5YLKm8n%R?^nsGhnP=8*y={8CBq{b z{Z1z2l0k`Rey6&pI09&?tw5cO;>4>RN@eM;5S9L+n!_|Sv1%ql{6v*EAj?yZ53f0e zGuz;q!pFarb_lP-92?X@yK2iBQ;9w_7OK&>_`#l?oq;sGg&;vunv(hKK&)jBGjxwu z@Kdut>cI;O;%x00?ndE2=bbq|pIxuF6kh^vxsjCt#~RjYlIH>zABUiYp4!%AA4{6OoRsk@aiB5-scca{ zgAc*xCz9H^EL)%*w$84D!Nm3-fZNkzve)G0*kYJ`?d zIpjut2dLm)=AZ34RwGb!v*GfMJf3||p%&~r!JRCSvmq2}EZT|TU?LW<#WEpSedEKH z9rtUHv@iE7LQ_c-f8H1-Znqi5p#pMe90Z!{VAf*dI)stltyRxJvofFk(yti0 zx|9WUkxLZkVJ0Wam1udF5}C2ce5Qug{)O+Ie*AF8Rv1#EQjKet91DYB#y(b#(fqxD z=vSK6#ca?)n&qt?EibeHleq-0r6&V>JLM+Sw|sprhxy8nA5LOrEOzx@et+=rHfShJ zXBp4>%&;4QGXd`*jU>amD8M9P-G!n1X*1*#@TeB03U;X2eat>Nze&YfGYg@L?*?Yu(P`DMIR42wH#Yo+>sAW0hA$p6f!s92m}jI%+zHV@~WpCT;m8=%^DqO zW|QW@yFWsIEu5wBkt~^=L1}fQ&MWCTUWZ%^n+FxEYE&eo_{k&hvMGy1Ca`awgh#=pynJdeU{rREf6`K z((@f%xEN&nCFyJP#M;K$;j{2-z>T|#ZvC_xM`?+X1vDf{lyKwxeBPPRdLkF-l{ z&(J5~U}ZMBvu8z(iVsZBPqjeE3+mAUt{@d`Hbpx#TlcruF$Zq(v+_Gz*1q%Cg0J$b zMWqv)I_|9_JwTh7s6NVxU@S6fZ5rP*(b;?P6W#M|Q{E%HF!*3aq8ZM8My=ByJRL_H zIB|FJLP+-G0rGRa%}pH--cJA`MaG=)el2nma18yxjp$ePRo^pqHhNFtN}b#Yu-G|j zWV6RBb9UZ16LPOPM<0hNk_U1n)~-O>v$k)+5iV1a3$HQSx&#Nahs319%u@A(zX5fD zSVdp$R9X)pb`6ayC_94ho$fEO{b`m?`*5v73IQ%*^kBH6Af!-`iXg>&@Ti`J!j!CN zqZ=tqJ5I;-t+5^@=@Nk)boU~N=edVvmmizr$_7cy*AqEy`naa4JCM)h0g`Batz 
z0j|PMD9#>RO=h(8sRzt1$QxCWuK5yEEk0YzBLc*B8CA_|tF=SP-u)Du$}6+$f{C~* zYylAlW#yhgHyzX7HR9N!Egb}*7{*O&+yw|Xt1d<%7LsW`dD@@74_EH5Kn7D(jhyKR ztLMrI5&Z5r*J_k>D73H^;gT!1`&99L?U`qv0JX&t)xEWFsTEV@i260l6x2!x_s>cx ziZADsDqDN*uO#2{u1torx59SQ8WH8~Hp^ryB8iiR!+Snt6CWS5B?UWNNYc|k>`BD{ zYp%%pIdp~ixk4jVw^H3+fmGirFLK>JfB9W`WprPYwrcV-Rp8qQaQ1=cGYL(V8K7uZ z?>ThBDUxb!^P3g3P@%`n16g9n@3O0J_ZHc|Sx$3=765keIKkMTW?fE`?l(j>Q(D}8 zQeP{s1fLD^F80G9W}~+%!&E+771NZeI!*9j#63ozC6Cq{T4Y>PkO61fyoOnrTT}-v zSoG#e@#Eu}MUm9d2MyH=&hpcJ%DzrGwM2r8sOqYyKfE#eabL&ktLQo`!@2;cd(xWh zT21{``ca`~=^|5c0}5Ee+#QZCT2T+zi`WXMPq1hKjYA9vn+#WnXU(^~L0GU&@Ke$; zuTt~8$=y3*MW{$X4^_dI9c3Z@s!?)NF4{|P7ITA@HNmcI8oHsVU7EylK>KEm78ma) zzv=g=vvQ9L2@^f9$dhf5kDAN))XgGt=_S~1uW`j{fa{a>hB?roaklqoO^aeS$|15X zLS2;v%Q5}uW{+H!rYDB1Wv=w3f7W!H_)^wjm%UP9D}{n?@+r64IwvOlE1ZG(sx8 zxP0lDg_&q3k5(_$>3AH4sMfaF!*3Qd9t0-HH}GiCxS9Ovett?pgkD5~Jr9ZE_b~^# z@@px>rOE}(h6WKV{1nvaZ8{*FHdl4yLh$n<_Wajh@-}ws^C?X0{-QP*|;bR&Co=D@zEYi&qyMo2H@C8da2rC z<@+vZn_uzIsT&C$g9%}5R|&KL7ArBuumo$#kTltOM#2?LO==v=9-(-pJiebc&}?(k z9t6WY7a?z(Lk{pcnht7Ix`EcCdu?XDw`B0#G12gftNye$S~LKY0hNgAlLarMO=Ehx z`1I;djAMh-67)+g@uy&|bh}bWe0Q0?Z&vUVv>>J8Yz=WqQlzPp1Fn8I%+*V4eBAE? 
zusO)vcoH|M(>vwgf~qA&;OuG&DyBc9Ipspa@;(A>ioPZpEy=tV2bq8mrVVHArq5^U z{R@**&ZwMh2Hq3aX}jDDEk$fg2@(l1*)Wd>qPW^Hj)T>0-Wvp`t7X#q2X@I8=19_N zDN}0Z_+Yi^6TDyldcxyD$l_tj=Vm5u7>$nZ z^<)jSSGVaVI!{W~yjC+okMRu{T;rFWkeYJgpw||gr{RuJ0;^l6C%Pt&voP(cJ#rer zN0`58?^on)hG`iEC+jch$#)#US-(T{S(W8AnPcEicN_$zI`%m7daOnY-xs&sY;}FC)Yyrd6u9s{NWom+mGt2+hV(rC8#Pz zcYNK#5?|CF-@ia`@=hIGOQ^U6KdAxRLAODx1`Awqja1}EbJiu&TRiP=4n-ZXe~43c z857Upg}*5HqFOb64SYa2*QwA4-&&6!-w3^fVC^IMs^&E{tKt%1$$rk>oVValmdxEY zLUgBo@R_j#n``I0Hm_N^>3Px-#P}GMsK!)hE+bh_!N*{{;r?U6WR%UQgCtYjOyUR-fm)Fz1#Q`O$cqA*CQrT4pC-M84+$g04 z$Z<%t#eKQ1(`*GDHvBjAim5>_l;j6PjDe`&FV`43)CWJzn`-jIG)QszRz7u0{hPy{df+b|8lfD)Sq!8;aufj=wu-HojGV53sOYStR| zGb+>GH29hTC&2uply=Fl<31%9N5lD|+wU&~m|sS}yTg)=aW`r=gpT{*9mUnB(&AywS|~%d z(l3)6kI6A#-P*IiYE$@9UHv#IPWEqXFN>S7PP}_G)SXp8r7*v0s=X0dm|B*wdiTXI z%-Tw)^LTL`-G^?m#~g;q8=p<}t0%rr&}x*;zg#GJ zqU~g9JQLJctDdT0VDZ!>q!Jll75s@26bpqw@MqXZQkB~or|urqc7dE6bz>lXRA86} zI~Y#-(bq8WD@NIc=f~QgiIbi%e*OTmtrBVQ4&m3lXp zi(BY@`7@P!13s^Uy1twfSI%{+sfIyBlBT*yeZ*xxTff{{`@IEPz)uB7e%>0oxT9DF z{qRQoI=@wt;QEmY<7?hp-x%rXBZOvN6``+)be&QS=UoA-6L5NnTCWL)q29gC% zd%M(1&m*zE0vYWt86O)s+tNJw+Ez=TVqSaIS78%`9xBw@;k+=;J~Owq#|dm-qw}sa zizvtY1~d<2nvST4eRX z7Oz!)7EL6Pf&bdPq*f2rwwoWet_^TNJx{~JT5%O_>T33*I#laoFmX?+L~9sEtGS?Htoj->OE7d51ez z?s43UVib0q_tavOp?pr3+FrX6LM<_U{S62Ck2kQp;*Z-evTy5;o6m7T=FNEkGQ0pZ zOpe{Y`4d2$Z{gas%pZ>e-5li~=l&mqpV1n{TNJn^_D_FdjrgAkY5mRm_cupko#`!d zTGxI%CLjYq>+8IK832f5L-?PZkPW)GsB**b?TEZ-{dRQQ{1YqS0zk)`f3hm@03eAi zfw$;_7ywG$5_*ePNC2RdE#6J#qRuhOJS80 zkhqHkRlo__pr-<{?fw~q>Mj*j9uH_^mjRT!`)3dvd;sLP*9HFm6b2T7)^|nUP>MY& zs3yU`X-<3iZ@{TA0F<|f1XVBm7i4{p06&7VUY%a#`ck*E~Nf~Py5twAo&3m6qDQ=Knco|gZo$P_6ASrfhhFp|AoH4 zLCa=u5G6>({6AM9XaxWX9wI^gwgkx>iocx^-3Ea2pFz!9gK7@{Ox?vH6;ZM6|9@@6 z>XV7Ny#<@Qn~go&|Bd8rsxbinr-Q(NI1!t-1!W!)ft-&1yndlz2LQz#Awi;pGLG12 z|MR{7b$UX+Jq?0}fMEMq4gpaZIPD0^@56nw4B~(koe)6e$8i58`yXrJ|Hyti|05&( zcjQ6GR8V3bf8o^=1W=X-!oQS)=iA~rMuMXD{FerL(*8@Y_yRzBCrD6DzW>q~et>`J zDIfs!^^GnA{zK!ujr2GX075xMf*MHtS3?fM`&Y990)Xt^=qAu#I{K9MP1A5n1=X4H 
z7eLSa&xNC%Q9%V{|Al4GaQ|!g|KsZUpW)l){7wIwgUTg9ZNmCL9O;d!f1Zy^)lttY-EmuCD*Ls0=TtpgKnWo-FO+&mW7kxx<=g>fwml$x0zy4h1{{yI$%}4+M diff --git a/lambda-layer/gradle/wrapper/gradle-wrapper.properties b/lambda-layer/gradle/wrapper/gradle-wrapper.properties index 37aef8d3f0..aa02b02fc6 100644 --- a/lambda-layer/gradle/wrapper/gradle-wrapper.properties +++ b/lambda-layer/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.1.1-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.3-bin.zip networkTimeout=10000 zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/lambda-layer/gradlew b/lambda-layer/gradlew index 5bba57aa19..aeb74cbb43 100755 --- a/lambda-layer/gradlew +++ b/lambda-layer/gradlew @@ -1,4 +1,3 @@ - #!/bin/sh # @@ -86,9 +85,6 @@ done APP_BASE_NAME=${0##*/} APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' - # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum @@ -198,6 +194,10 @@ if "$cygwin" || "$msys" ; then done fi + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + # Collect all arguments for the java command; # * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of # shell script including quotes and variable substitutions, so put them in diff --git a/lambda-layer/gradlew.bat b/lambda-layer/gradlew.bat index 084f02df4a..93e3f59f13 100644 --- a/lambda-layer/gradlew.bat +++ b/lambda-layer/gradlew.bat @@ -1,4 +1,3 @@ - @rem @rem Copyright 2015 the original author or authors. 
@rem From 59c9edc19058d6eb3b68e8c8f0ce84a3d4854b91 Mon Sep 17 00:00:00 2001 From: "Luke (GuangHui) Zhang" Date: Wed, 20 Aug 2025 10:33:28 -0700 Subject: [PATCH 30/83] fix(deps): replace deprecated Gradle wrapper validation action (#1159) The latest release of gradle/wrapper-validation-action is v3.5, but ADOT Java is currently using v1. This PR replaces the deprecated gradle/wrapper-validation-action with the new gradle/actions/wrapper-validation action. v4.4.2 is an official release. It's commit hash is #017a9ef. https://github.com/gradle/actions/releases/tag/v4.4.2 When uses third-party action, we should always use commit hash instead of release number, for security and integrity reason. https://docs.github.com/en/actions/reference/security/secure-use#using-third-party-actions References: - https://github.com/gradle/wrapper-validation-action - https://github.com/gradle/actions/releases/ By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .github/workflows/codeql-analysis.yml | 2 +- .github/workflows/docker-build-corretto-slim.yml | 2 +- .github/workflows/docker-build-smoke-tests-fake-backend.yml | 2 +- .github/workflows/e2e-tests-app-with-java-agent.yml | 2 +- .github/workflows/main-build.yml | 6 +++--- .github/workflows/nightly-upstream-snapshot-build.yml | 4 ++-- .github/workflows/pr-build.yml | 4 ++-- .github/workflows/release-build.yml | 2 +- .gitignore | 2 ++ 9 files changed, 14 insertions(+), 12 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index cea96c8cbe..6f878a0573 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -50,7 +50,7 @@ jobs: gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} gpg_password: ${{ secrets.GPG_PASSPHRASE }} - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 - name: Manually build to avoid autobuild failures uses: gradle/gradle-build-action@v3 diff --git a/.github/workflows/docker-build-corretto-slim.yml b/.github/workflows/docker-build-corretto-slim.yml index cab6cc17ac..ed461109ee 100644 --- a/.github/workflows/docker-build-corretto-slim.yml +++ b/.github/workflows/docker-build-corretto-slim.yml @@ -20,7 +20,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 with: diff --git a/.github/workflows/docker-build-smoke-tests-fake-backend.yml b/.github/workflows/docker-build-smoke-tests-fake-backend.yml index a94c5752c7..1f8dcf4609 100644 --- a/.github/workflows/docker-build-smoke-tests-fake-backend.yml +++ b/.github/workflows/docker-build-smoke-tests-fake-backend.yml @@ -38,7 +38,7 @@ jobs: with: gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} 
gpg_password: ${{ secrets.GPG_PASSPHRASE }} - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 with: diff --git a/.github/workflows/e2e-tests-app-with-java-agent.yml b/.github/workflows/e2e-tests-app-with-java-agent.yml index f2fe9df20c..d24f4540e0 100644 --- a/.github/workflows/e2e-tests-app-with-java-agent.yml +++ b/.github/workflows/e2e-tests-app-with-java-agent.yml @@ -51,7 +51,7 @@ jobs: gpg_password: ${{ secrets.GPG_PASSPHRASE }} - name: Validate the checksums of Gradle Wrapper - uses: gradle/wrapper-validation-action@v1 + uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 diff --git a/.github/workflows/main-build.yml b/.github/workflows/main-build.yml index c5e3f2f020..6950bff120 100644 --- a/.github/workflows/main-build.yml +++ b/.github/workflows/main-build.yml @@ -39,7 +39,7 @@ jobs: with: path: ~/.pnpm-store key: ${{ runner.os }}-test-cache-pnpm-modules - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 - uses: ./.github/actions/patch-dependencies with: run_tests: "true" @@ -76,7 +76,7 @@ jobs: gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} gpg_password: ${{ secrets.GPG_PASSPHRASE }} - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 @@ -196,7 +196,7 @@ jobs: with: java-version: 23 distribution: 'temurin' - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 - name: Configure AWS Credentials uses: 
aws-actions/configure-aws-credentials@v4 diff --git a/.github/workflows/nightly-upstream-snapshot-build.yml b/.github/workflows/nightly-upstream-snapshot-build.yml index cd9ccf0da6..408419033d 100644 --- a/.github/workflows/nightly-upstream-snapshot-build.yml +++ b/.github/workflows/nightly-upstream-snapshot-build.yml @@ -46,7 +46,7 @@ jobs: gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} gpg_password: ${{ secrets.GPG_PASSPHRASE }} - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 @@ -136,7 +136,7 @@ jobs: with: java-version: 23 distribution: 'temurin' - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 diff --git a/.github/workflows/pr-build.yml b/.github/workflows/pr-build.yml index f068d52b6c..14e71745cd 100644 --- a/.github/workflows/pr-build.yml +++ b/.github/workflows/pr-build.yml @@ -31,7 +31,7 @@ jobs: path: ~/.pnpm-store key: ${{ runner.os }}-test-cache-pnpm-modules - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 - uses: ./.github/actions/patch-dependencies with: @@ -61,7 +61,7 @@ jobs: java-version: 17 distribution: temurin - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 # Cleanup directories before proceeding with setup - name: Clean up old installations diff --git a/.github/workflows/release-build.yml b/.github/workflows/release-build.yml index 0221866cf4..b73636e488 100644 --- a/.github/workflows/release-build.yml +++ b/.github/workflows/release-build.yml @@ -29,7 +29,7 @@ jobs: with: java-version: 17 distribution: 'temurin' - - uses: 
gradle/wrapper-validation-action@v1 + - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 - name: Publish patched dependencies to maven local uses: ./.github/actions/patch-dependencies diff --git a/.gitignore b/.gitignore index 150fdce9ee..1f49e40488 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,8 @@ build/ .DS_Store +.vscode + # Ignore Gradle GUI config gradle-app.setting From d943a1df79c9e63c17cc66b49d0cc8c0e71e5747 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Aug 2025 18:17:04 +0000 Subject: [PATCH 31/83] Bump org.junit.jupiter:junit-jupiter-engine from 5.10.1 to 5.13.4 (#1152) --- appsignals-tests/images/kafka/kafka-consumers/build.gradle.kts | 2 +- appsignals-tests/images/kafka/kafka-producers/build.gradle.kts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/appsignals-tests/images/kafka/kafka-consumers/build.gradle.kts b/appsignals-tests/images/kafka/kafka-consumers/build.gradle.kts index 3b0a88eee4..d7555f4c7b 100644 --- a/appsignals-tests/images/kafka/kafka-consumers/build.gradle.kts +++ b/appsignals-tests/images/kafka/kafka-consumers/build.gradle.kts @@ -31,7 +31,7 @@ dependencies { implementation("org.slf4j:slf4j-api:2.0.9") implementation("org.slf4j:slf4j-simple:2.0.9") testImplementation("org.junit.jupiter:junit-jupiter-api:5.9.2") - testRuntimeOnly("org.junit.jupiter:junit-jupiter-engine:5.10.1") + testRuntimeOnly("org.junit.jupiter:junit-jupiter-engine:5.13.4") } tasks.getByName("test") { diff --git a/appsignals-tests/images/kafka/kafka-producers/build.gradle.kts b/appsignals-tests/images/kafka/kafka-producers/build.gradle.kts index 57154c981b..f05d099a85 100644 --- a/appsignals-tests/images/kafka/kafka-producers/build.gradle.kts +++ b/appsignals-tests/images/kafka/kafka-producers/build.gradle.kts @@ -31,7 +31,7 @@ dependencies { implementation("org.slf4j:slf4j-api:2.0.9") 
implementation("org.slf4j:slf4j-simple:2.0.9") testImplementation("org.junit.jupiter:junit-jupiter-api:5.9.2") - testRuntimeOnly("org.junit.jupiter:junit-jupiter-engine:5.10.1") + testRuntimeOnly("org.junit.jupiter:junit-jupiter-engine:5.13.4") } tasks.getByName("test") { From 6d9b13af32f55094dd8efd5fb20d2deabf54ff95 Mon Sep 17 00:00:00 2001 From: "Luke (GuangHui) Zhang" Date: Wed, 20 Aug 2025 12:08:16 -0700 Subject: [PATCH 32/83] [Lambda Java] Merge All Code Changes from v1.33 Branch into Main (#1112) This change merges all private Lambda Java updates from the v1.33 branch into the main branch. Those changes have been merged into release branch v2.11.x with following PR. This PR does the similar thing which back port those changes to the main branch. [Lambda Java v2.11.x] Merge All Code Changes from v1.33.x Branch into v2.11.x #1114 https://github.com/aws-observability/aws-otel-java-instrumentation/pull/1114 I performed a 'git rebase main' on the v1.33 branch, reviewed all changes, and completed the build and testing process. The resulting Lambda layer generated trace data identical to the version built directly from the main branch (excluding this PR). Note: The changes in the patch files are not included in this PR. They should have been reviewed and incorporated as part of this migration: Upgrade Java Lambda Layer to 2.x #1076 https://github.com/aws-observability/aws-otel-java-instrumentation/pull/1076 *Issue #, if available:* *Description of changes:* By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .github/workflows/main-build.yml | 2 +- .github/workflows/release-lambda.yml | 2 +- ...sApplicationSignalsCustomizerProvider.java | 24 ++- .../javaagent/providers/AwsAttributeKeys.java | 3 + .../providers/AwsLambdaSpanProcessor.java | 55 +++++++ .../providers/AwsSpanProcessingUtil.java | 35 +++++ .../providers/AwsLambdaSpanProcessorTest.java | 140 ++++++++++++++++++ .../providers/AwsSpanProcessingUtilTest.java | 104 +++++++++++++ lambda-layer/.gitignore | 6 + lambda-layer/build-layer.sh | 2 +- lambda-layer/otel-instrument | 2 +- 11 files changed, 370 insertions(+), 5 deletions(-) create mode 100644 awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsLambdaSpanProcessor.java create mode 100644 awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsLambdaSpanProcessorTest.java diff --git a/.github/workflows/main-build.yml b/.github/workflows/main-build.yml index 6950bff120..191d545ff0 100644 --- a/.github/workflows/main-build.yml +++ b/.github/workflows/main-build.yml @@ -256,7 +256,7 @@ jobs: aws s3 cp ./build/distributions/aws-opentelemetry-java-layer.zip s3://adot-main-build-staging-jar/adot-java-lambda-layer-${{ github.run_id }}.zip application-signals-e2e-test: - needs: [build] + needs: [build, application-signals-lambda-layer-build] uses: ./.github/workflows/application-signals-e2e-test.yml secrets: inherit with: diff --git a/.github/workflows/release-lambda.yml b/.github/workflows/release-lambda.yml index 1d2068e830..e85e9d1ed3 100644 --- a/.github/workflows/release-lambda.yml +++ b/.github/workflows/release-lambda.yml @@ -110,7 +110,7 @@ jobs: aws lambda publish-layer-version \ --layer-name ${{ env.LAYER_NAME }} \ --content S3Bucket=${{ env.BUCKET_NAME }},S3Key=aws-opentelemetry-java-layer.zip \ - --compatible-runtimes java17 java21 \ + --compatible-runtimes java11 java17 java21 \ --compatible-architectures "arm64" "x86_64" \ --license-info "Apache-2.0" \ --description "AWS Distro of 
OpenTelemetry Lambda Layer for Java Runtime" \ diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsApplicationSignalsCustomizerProvider.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsApplicationSignalsCustomizerProvider.java index 4652885090..13cb4ddd81 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsApplicationSignalsCustomizerProvider.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsApplicationSignalsCustomizerProvider.java @@ -289,6 +289,10 @@ private Sampler customizeSampler(Sampler sampler, ConfigProperties configProps) private SdkTracerProviderBuilder customizeTracerProviderBuilder( SdkTracerProviderBuilder tracerProviderBuilder, ConfigProperties configProps) { + if (isLambdaEnvironment()) { + tracerProviderBuilder.addSpanProcessor(new AwsLambdaSpanProcessor()); + } + if (isApplicationSignalsEnabled(configProps)) { logger.info("AWS Application Signals enabled"); Duration exportInterval = @@ -300,9 +304,27 @@ private SdkTracerProviderBuilder customizeTracerProviderBuilder( // If running on Lambda, we just need to export 100% spans and skip generating any Application // Signals metrics. 
- if (isLambdaEnvironment()) { + if (isLambdaEnvironment() + && System.getenv(OTEL_EXPORTER_OTLP_TRACES_ENDPOINT_CONFIG) == null) { + String tracesEndpoint = + Optional.ofNullable(System.getenv(AWS_XRAY_DAEMON_ADDRESS_CONFIG)) + .orElse(DEFAULT_UDP_ENDPOINT); + SpanExporter spanExporter = + new OtlpUdpSpanExporterBuilder() + .setPayloadSampleDecision(TracePayloadSampleDecision.UNSAMPLED) + .setEndpoint(tracesEndpoint) + .build(); + + // Wrap the udp exporter with the AwsMetricsAttributesSpanExporter to add Application + // Signals attributes to unsampled spans too + SpanExporter appSignalsSpanExporter = + AwsMetricAttributesSpanExporterBuilder.create( + spanExporter, ResourceHolder.getResource()) + .build(); + tracerProviderBuilder.addSpanProcessor( AwsUnsampledOnlySpanProcessorBuilder.create() + .setSpanExporter(appSignalsSpanExporter) .setMaxExportBatchSize(LAMBDA_SPAN_EXPORT_BATCH_SIZE) .build()); return tracerProviderBuilder; diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsAttributeKeys.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsAttributeKeys.java index d63a3c9231..65b36ef765 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsAttributeKeys.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsAttributeKeys.java @@ -126,4 +126,7 @@ private AwsAttributeKeys() {} AttributeKey.stringKey("aws.bedrock.guardrail.id"); static final AttributeKey AWS_GUARDRAIL_ARN = AttributeKey.stringKey("aws.bedrock.guardrail.arn"); + + static final AttributeKey AWS_TRACE_LAMBDA_MULTIPLE_SERVER = + AttributeKey.booleanKey("aws.trace.lambda.multiple-server"); } diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsLambdaSpanProcessor.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsLambdaSpanProcessor.java new file mode 
100644 index 0000000000..65b5f6c07c --- /dev/null +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsLambdaSpanProcessor.java @@ -0,0 +1,55 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +package software.amazon.opentelemetry.javaagent.providers; + +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.context.Context; +import io.opentelemetry.sdk.trace.ReadWriteSpan; +import io.opentelemetry.sdk.trace.ReadableSpan; +import io.opentelemetry.sdk.trace.SpanProcessor; +import javax.annotation.concurrent.Immutable; + +@Immutable +public final class AwsLambdaSpanProcessor implements SpanProcessor { + @Override + public void onStart(Context parentContext, ReadWriteSpan span) { + if (AwsSpanProcessingUtil.isServletServerSpan(span)) { + Span parentSpan = Span.fromContextOrNull(parentContext); + if (parentSpan == null || !(parentSpan instanceof ReadWriteSpan)) { + return; + } + + ReadWriteSpan parentReadWriteSpan = (ReadWriteSpan) parentSpan; + if (!AwsSpanProcessingUtil.isLambdaServerSpan(parentReadWriteSpan)) { + return; + } + parentReadWriteSpan.setAttribute(AwsAttributeKeys.AWS_TRACE_LAMBDA_MULTIPLE_SERVER, true); + } + } + + @Override + public boolean isStartRequired() { + return true; + } + + @Override + public void onEnd(ReadableSpan span) {} + + @Override + public boolean isEndRequired() { + return false; + } +} diff --git 
a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtil.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtil.java index 1627de190c..4da4d80e3d 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtil.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtil.java @@ -39,6 +39,7 @@ import io.opentelemetry.api.trace.SpanContext; import io.opentelemetry.api.trace.SpanKind; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.trace.ReadableSpan; import io.opentelemetry.sdk.trace.data.SpanData; import java.io.IOException; import java.io.InputStream; @@ -70,6 +71,10 @@ final class AwsSpanProcessingUtil { private static final String SQL_DIALECT_KEYWORDS_JSON = "configuration/sql_dialect_keywords.json"; + static final AttributeKey OTEL_SCOPE_NAME = AttributeKey.stringKey("otel.scope.name"); + static final String LAMBDA_SCOPE_PREFIX = "io.opentelemetry.aws-lambda-"; + static final String SERVLET_SCOPE_PREFIX = "io.opentelemetry.servlet-"; + static List getDialectKeywords() { try (InputStream jsonFile = AwsSpanProcessingUtil.class @@ -109,6 +114,10 @@ static String getIngressOperation(SpanData span) { if (operationOverride != null) { return operationOverride; } + String op = generateIngressOperation(span); + if (!op.equals(UNKNOWN_OPERATION)) { + return op; + } return getFunctionNameFromEnv() + "/FunctionHandler"; } String operation = span.getName(); @@ -290,4 +299,30 @@ static boolean isDBSpan(SpanData span) { || isKeyPresent(span, DB_OPERATION) || isKeyPresent(span, DB_STATEMENT); } + + static boolean isLambdaServerSpan(ReadableSpan span) { + String scopeName = null; + if (span != null + && span.toSpanData() != null + && span.toSpanData().getInstrumentationScopeInfo() != null) { + scopeName = 
span.toSpanData().getInstrumentationScopeInfo().getName(); + } + + return scopeName != null + && scopeName.startsWith(LAMBDA_SCOPE_PREFIX) + && SpanKind.SERVER == span.getKind(); + } + + static boolean isServletServerSpan(ReadableSpan span) { + String scopeName = null; + if (span != null + && span.toSpanData() != null + && span.toSpanData().getInstrumentationScopeInfo() != null) { + scopeName = span.toSpanData().getInstrumentationScopeInfo().getName(); + } + + return scopeName != null + && scopeName.startsWith(SERVLET_SCOPE_PREFIX) + && SpanKind.SERVER == span.getKind(); + } } diff --git a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsLambdaSpanProcessorTest.java b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsLambdaSpanProcessorTest.java new file mode 100644 index 0000000000..19f1ae005c --- /dev/null +++ b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsLambdaSpanProcessorTest.java @@ -0,0 +1,140 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + +package software.amazon.opentelemetry.javaagent.providers; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.mockito.Mockito.*; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.api.trace.SpanKind; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Context; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.trace.ReadWriteSpan; +import io.opentelemetry.sdk.trace.ReadableSpan; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.data.SpanData; +import java.util.Map; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class AwsLambdaSpanProcessorTest { + + private AwsLambdaSpanProcessor processor; + private ReadWriteSpan mockLambdaServerSpan; + private SpanData mockLambdaSpanData; + private InstrumentationScopeInfo mockLambdaScopeInfo; + private Map, Object> attributeMapForLambdaSpan; + private SpanContext mockSpanContext; + + private ReadWriteSpan mockServletServerSpan; + private SpanData mockServletSpanData; + private InstrumentationScopeInfo mockServletScopeInfo; + + private Tracer lambdaTracer; + private Tracer servletTracer; + private Tracer otherTracer; + + @BeforeEach + public void setup() { + processor = new AwsLambdaSpanProcessor(); + lambdaTracer = + SdkTracerProvider.builder() + .addSpanProcessor(processor) + .build() + .get(AwsSpanProcessingUtil.LAMBDA_SCOPE_PREFIX + "core-1.0"); + + servletTracer = + SdkTracerProvider.builder() + .addSpanProcessor(processor) + .build() + .get(AwsSpanProcessingUtil.SERVLET_SCOPE_PREFIX + "lib-3.0"); + + otherTracer = + SdkTracerProvider.builder().addSpanProcessor(processor).build().get("other-lib-2.0"); + } + + @Test + void testOnStart_servletServerSpan_withLambdaServerSpan() { + Span 
parentSpan = + lambdaTracer.spanBuilder("parent-lambda").setSpanKind(SpanKind.SERVER).startSpan(); + servletTracer + .spanBuilder("child-servlet") + .setSpanKind(SpanKind.SERVER) + .setParent(Context.current().with(parentSpan)) + .startSpan(); + + ReadableSpan parentReadableSpan = (ReadableSpan) parentSpan; + assertThat(parentReadableSpan.getAttribute(AwsAttributeKeys.AWS_TRACE_LAMBDA_MULTIPLE_SERVER)) + .isEqualTo(true); + } + + @Test + void testOnStart_servletInternalSpan_withLambdaServerSpan() { + Span parentSpan = + lambdaTracer.spanBuilder("parent-lambda").setSpanKind(SpanKind.SERVER).startSpan(); + + servletTracer + .spanBuilder("child-servlet") + .setSpanKind(SpanKind.INTERNAL) + .setParent(Context.current().with(parentSpan)) + .startSpan(); + + ReadableSpan parentReadableSpan = (ReadableSpan) parentSpan; + assertNull(parentReadableSpan.getAttribute(AwsAttributeKeys.AWS_TRACE_LAMBDA_MULTIPLE_SERVER)); + } + + @Test + void testOnStart_servletServerSpan_withLambdaInternalSpan() { + Span parentSpan = + lambdaTracer.spanBuilder("parent-lambda").setSpanKind(SpanKind.INTERNAL).startSpan(); + + servletTracer + .spanBuilder("child-servlet") + .setSpanKind(SpanKind.SERVER) + .setParent(Context.current().with(parentSpan)) + .startSpan(); + + ReadableSpan parentReadableSpan = (ReadableSpan) parentSpan; + assertNull(parentReadableSpan.getAttribute(AwsAttributeKeys.AWS_TRACE_LAMBDA_MULTIPLE_SERVER)); + } + + @Test + void testOnStart_servletServerSpan_withLambdaServerSpanAsGrandParent() { + Span grandParentSpan = + lambdaTracer.spanBuilder("grandparent-lambda").setSpanKind(SpanKind.SERVER).startSpan(); + + Span parentSpan = + otherTracer + .spanBuilder("parent-other") + .setSpanKind(SpanKind.SERVER) + .setParent(Context.current().with(grandParentSpan)) + .startSpan(); + + servletTracer + .spanBuilder("child-servlet") + .setSpanKind(SpanKind.SERVER) + .setParent(Context.current().with(parentSpan)) + .startSpan(); + + ReadableSpan grandParentReadableSpan = (ReadableSpan) 
grandParentSpan; + assertNull( + grandParentReadableSpan.getAttribute(AwsAttributeKeys.AWS_TRACE_LAMBDA_MULTIPLE_SERVER)); + } +} diff --git a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtilTest.java b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtilTest.java index d4675674dc..b7b2286c2f 100644 --- a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtilTest.java +++ b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanProcessingUtilTest.java @@ -23,6 +23,8 @@ import static io.opentelemetry.semconv.incubating.MessagingIncubatingAttributes.MessagingOperationTypeIncubatingValues.RECEIVE; import static io.opentelemetry.semconv.incubating.RpcIncubatingAttributes.RPC_SYSTEM; import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Answers.CALLS_REAL_METHODS; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mockStatic; @@ -37,6 +39,7 @@ import io.opentelemetry.api.trace.SpanContext; import io.opentelemetry.api.trace.SpanKind; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.trace.ReadableSpan; import io.opentelemetry.sdk.trace.data.SpanData; import java.util.List; import org.junit.jupiter.api.BeforeEach; @@ -544,4 +547,105 @@ public void testGetKeyValueWithFallback_BothKeysAbsent() { spanDataMock, MESSAGING_OPERATION_TYPE, MESSAGING_OPERATION)) .isNull(); } + + @Test + public void testIsLambdaServerSpan_withLambdaScope() { + ReadableSpan span = mock(ReadableSpan.class); + SpanData spanData = mock(SpanData.class); + InstrumentationScopeInfo scopeInfo = mock(InstrumentationScopeInfo.class); + when(span.toSpanData()).thenReturn(spanData); + 
when(spanData.getInstrumentationScopeInfo()).thenReturn(scopeInfo); + when(scopeInfo.getName()).thenReturn(AwsSpanProcessingUtil.LAMBDA_SCOPE_PREFIX + "-lib-1.0"); + when(span.getKind()).thenReturn(SpanKind.SERVER); + + assertTrue(AwsSpanProcessingUtil.isLambdaServerSpan(span)); + } + + @Test + public void testIsLambdaServerSpan_withNonLambdaScope() { + ReadableSpan span = mock(ReadableSpan.class); + SpanData spanData = mock(SpanData.class); + InstrumentationScopeInfo scopeInfo = mock(InstrumentationScopeInfo.class); + when(span.toSpanData()).thenReturn(spanData); + when(spanData.getInstrumentationScopeInfo()).thenReturn(scopeInfo); + when(scopeInfo.getName()) + .thenReturn("org.abc." + AwsSpanProcessingUtil.LAMBDA_SCOPE_PREFIX + "-lib-3.0"); + when(span.getKind()).thenReturn(SpanKind.SERVER); + + assertFalse(AwsSpanProcessingUtil.isLambdaServerSpan(span)); + } + + @Test + public void testIsLambdaServerSpan_withNullScope() { + ReadableSpan span = mock(ReadableSpan.class); + SpanData spanData = mock(SpanData.class); + when(span.toSpanData()).thenReturn(spanData); + when(spanData.getInstrumentationScopeInfo()).thenReturn(null); + when(span.getKind()).thenReturn(SpanKind.SERVER); + + assertFalse(AwsSpanProcessingUtil.isLambdaServerSpan(span)); + } + + @Test + public void testIsLambdaServerSpan_withNonServerSpanKind() { + ReadableSpan span = mock(ReadableSpan.class); + SpanData spanData = mock(SpanData.class); + InstrumentationScopeInfo scopeInfo = mock(InstrumentationScopeInfo.class); + when(span.toSpanData()).thenReturn(spanData); + when(spanData.getInstrumentationScopeInfo()).thenReturn(scopeInfo); + when(scopeInfo.getName()).thenReturn(AwsSpanProcessingUtil.LAMBDA_SCOPE_PREFIX + "-core-1.0"); + when(span.getKind()).thenReturn(SpanKind.CLIENT); + + assertFalse(AwsSpanProcessingUtil.isLambdaServerSpan(span)); + } + + @Test + public void testIsServletServerSpan_withServletScope() { + ReadableSpan span = mock(ReadableSpan.class); + SpanData spanData = 
mock(SpanData.class); + InstrumentationScopeInfo scopeInfo = mock(InstrumentationScopeInfo.class); + when(span.toSpanData()).thenReturn(spanData); + when(spanData.getInstrumentationScopeInfo()).thenReturn(scopeInfo); + when(scopeInfo.getName()).thenReturn(AwsSpanProcessingUtil.SERVLET_SCOPE_PREFIX + "-3.0"); + when(span.getKind()).thenReturn(SpanKind.SERVER); + + assertTrue(AwsSpanProcessingUtil.isServletServerSpan(span)); + } + + @Test + public void testIsServletServerSpan_withNonServletScope() { + ReadableSpan span = mock(ReadableSpan.class); + SpanData spanData = mock(SpanData.class); + InstrumentationScopeInfo scopeInfo = mock(InstrumentationScopeInfo.class); + when(span.toSpanData()).thenReturn(spanData); + when(spanData.getInstrumentationScopeInfo()).thenReturn(scopeInfo); + when(scopeInfo.getName()).thenReturn(AwsSpanProcessingUtil.LAMBDA_SCOPE_PREFIX + "-2.0"); + when(span.getKind()).thenReturn(SpanKind.SERVER); + + assertFalse(AwsSpanProcessingUtil.isServletServerSpan(span)); + } + + @Test + public void testIsServletServerSpan_withNullScope() { + ReadableSpan span = mock(ReadableSpan.class); + SpanData spanData = mock(SpanData.class); + when(span.toSpanData()).thenReturn(spanData); + when(spanData.getInstrumentationScopeInfo()).thenReturn(null); + when(span.getKind()).thenReturn(SpanKind.SERVER); + + assertFalse(AwsSpanProcessingUtil.isServletServerSpan(span)); + } + + @Test + public void testIsServletServerSpan_withNonServerSpanKind() { + ReadableSpan span = mock(ReadableSpan.class); + SpanData spanData = mock(SpanData.class); + InstrumentationScopeInfo scopeInfo = mock(InstrumentationScopeInfo.class); + when(span.toSpanData()).thenReturn(spanData); + when(spanData.getInstrumentationScopeInfo()).thenReturn(scopeInfo); + when(scopeInfo.getName()).thenReturn(AwsSpanProcessingUtil.SERVLET_SCOPE_PREFIX + "-5.0"); + when(span.getKind()).thenReturn(SpanKind.CLIENT); + + assertFalse(AwsSpanProcessingUtil.isServletServerSpan(span)); + } } diff --git 
a/lambda-layer/.gitignore b/lambda-layer/.gitignore index 1b6985c009..719d3e0657 100644 --- a/lambda-layer/.gitignore +++ b/lambda-layer/.gitignore @@ -3,3 +3,9 @@ # Ignore Gradle build output directory build + +# Ignore Terraform state files +.terraform/ +*.tfstate +*.tfstate.backup +*.lock.hcl \ No newline at end of file diff --git a/lambda-layer/build-layer.sh b/lambda-layer/build-layer.sh index 8c944191de..265a367d66 100755 --- a/lambda-layer/build-layer.sh +++ b/lambda-layer/build-layer.sh @@ -73,4 +73,4 @@ popd ## Cleanup # revert the patch applied since it is only needed while building the layer. echo "Info: Cleanup" -git restore ../dependencyManagement/build.gradle.kts \ No newline at end of file +git restore ../dependencyManagement/build.gradle.kts diff --git a/lambda-layer/otel-instrument b/lambda-layer/otel-instrument index 07815ea51b..8bf5cf4657 100644 --- a/lambda-layer/otel-instrument +++ b/lambda-layer/otel-instrument @@ -65,4 +65,4 @@ fi ARGS=("${ARGS[0]}" "${EXTRA_ARGS[@]}" "${ARGS[@]:1}") -exec "${ARGS[@]}" \ No newline at end of file +exec "${ARGS[@]}" From 4a421342bd0e256233ab8bc610d6a6fcf24b52ee Mon Sep 17 00:00:00 2001 From: "Luke (GuangHui) Zhang" Date: Wed, 20 Aug 2025 13:32:20 -0700 Subject: [PATCH 33/83] fix(deps): Upgrade JDK from 17 to 21 (#1157) The upstream OTel Java Agent upgraded its build toolchain from JDK 17 to 21 in May 2024. This PR aligns ADOT with that change. Without this upgrade, merges with recent upstream changes will result in build failures. Build with jdk21 #11219 https://github.com/open-telemetry/opentelemetry-java-instrumentation/pull/11219 Tests performed: Local build with JDK 21 passes: ./gradlew build Unit tests pass: ./gradlew test Smoke/contract tests pass: ./gradlew appsignals-tests:contract-tests:contractTests By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .github/workflows/codeql-analysis.yml | 4 ++-- .../docker-build-smoke-tests-fake-backend.yml | 4 ++-- .../e2e-tests-app-with-java-agent.yml | 16 ++++++++-------- .github/workflows/e2e-tests-with-operator.yml | 6 +++--- .github/workflows/main-build.yml | 14 +++++++------- .../nightly-upstream-snapshot-build.yml | 6 +++--- .github/workflows/owasp.yml | 4 ++-- .github/workflows/patch-release-build.yml | 4 ++-- .github/workflows/pr-build.yml | 18 +++++++++--------- .github/workflows/release-build.yml | 4 ++-- .github/workflows/release-lambda.yml | 4 ++-- .github/workflows/release-udp-exporter.yml | 4 ++-- .github/workflows/udp-exporter-e2e-test.yml | 4 ++-- .java-version | 1 + 14 files changed, 47 insertions(+), 46 deletions(-) create mode 100644 .java-version diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 6f878a0573..c75f707d0a 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -31,9 +31,9 @@ jobs: with: languages: java - - uses: actions/setup-java@v4 + - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: 17 + java-version-file: .java-version distribution: temurin - name: Cache local Maven repository diff --git a/.github/workflows/docker-build-smoke-tests-fake-backend.yml b/.github/workflows/docker-build-smoke-tests-fake-backend.yml index 1f8dcf4609..3ad1d3f6de 100644 --- a/.github/workflows/docker-build-smoke-tests-fake-backend.yml +++ b/.github/workflows/docker-build-smoke-tests-fake-backend.yml @@ -21,9 +21,9 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: 17 + java-version-file: .java-version distribution: 'temurin' # cache local patch outputs - name: Cache local Maven repository diff --git a/.github/workflows/e2e-tests-app-with-java-agent.yml 
b/.github/workflows/e2e-tests-app-with-java-agent.yml index d24f4540e0..df1f06307a 100644 --- a/.github/workflows/e2e-tests-app-with-java-agent.yml +++ b/.github/workflows/e2e-tests-app-with-java-agent.yml @@ -29,9 +29,9 @@ jobs: with: fetch-depth: 0 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: 17 + java-version-file: .java-version distribution: temurin # cache local patch outputs @@ -81,9 +81,9 @@ jobs: steps: - uses: actions/checkout@v5 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: 17 + java-version-file: .java-version distribution: 'temurin' - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 @@ -112,9 +112,9 @@ jobs: steps: - uses: actions/checkout@v5 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: 17 + java-version-file: .java-version distribution: 'temurin' - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 @@ -143,9 +143,9 @@ jobs: steps: - uses: actions/checkout@v5 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: 17 + java-version-file: .java-version distribution: 'temurin' - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 diff --git a/.github/workflows/e2e-tests-with-operator.yml b/.github/workflows/e2e-tests-with-operator.yml index ab207d1a6d..92dfbee4f9 100644 --- a/.github/workflows/e2e-tests-with-operator.yml +++ b/.github/workflows/e2e-tests-with-operator.yml @@ -38,9 +38,9 @@ jobs: with: fetch-depth: 0 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: 17 + java-version-file: .java-version distribution: temurin # cache local patch outputs @@ 
-132,7 +132,7 @@ jobs: path: aws-otel-java-instrumentation - name: Set up JDK 11 - uses: actions/setup-java@v4 + uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: distribution: 'zulu' java-version: '11' diff --git a/.github/workflows/main-build.yml b/.github/workflows/main-build.yml index 191d545ff0..26f3ba2067 100644 --- a/.github/workflows/main-build.yml +++ b/.github/workflows/main-build.yml @@ -23,9 +23,9 @@ jobs: runs-on: aws-otel-java-instrumentation_ubuntu-latest_32-core steps: - uses: actions/checkout@v5 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: 17 + java-version-file: .java-version distribution: temurin # vaadin 14 tests fail with node 18 - name: Set up Node @@ -57,9 +57,9 @@ jobs: - uses: actions/checkout@v5 with: fetch-depth: 0 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: 17 + java-version-file: .java-version distribution: temurin # cache local patch outputs @@ -192,7 +192,7 @@ jobs: - uses: actions/checkout@v5 with: fetch-depth: 0 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: java-version: 23 distribution: 'temurin' @@ -232,9 +232,9 @@ jobs: - uses: actions/checkout@v5 with: fetch-depth: 0 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: 17 + java-version-file: .java-version distribution: 'temurin' - name: Build Application Signals Lambda Layer working-directory: lambda-layer diff --git a/.github/workflows/nightly-upstream-snapshot-build.yml b/.github/workflows/nightly-upstream-snapshot-build.yml index 408419033d..7be913ca15 100644 --- a/.github/workflows/nightly-upstream-snapshot-build.yml +++ b/.github/workflows/nightly-upstream-snapshot-build.yml @@ -27,9 +27,9 @@ jobs: with: fetch-depth: 0 - 
- uses: actions/setup-java@v4 + - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: 17 + java-version-file: .java-version distribution: 'temurin' # cache local patch outputs @@ -132,7 +132,7 @@ jobs: - uses: actions/checkout@v5 with: fetch-depth: 0 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: java-version: 23 distribution: 'temurin' diff --git a/.github/workflows/owasp.yml b/.github/workflows/owasp.yml index a505a4f865..6a3dbd5949 100644 --- a/.github/workflows/owasp.yml +++ b/.github/workflows/owasp.yml @@ -31,9 +31,9 @@ jobs: fetch-depth: 0 - name: Set up Java for dependency scan - uses: actions/setup-java@v4 + uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: 17 + java-version-file: .java-version distribution: 'temurin' - name: Configure AWS credentials for dependency scan diff --git a/.github/workflows/patch-release-build.yml b/.github/workflows/patch-release-build.yml index 725661e5aa..4da82f9a43 100644 --- a/.github/workflows/patch-release-build.yml +++ b/.github/workflows/patch-release-build.yml @@ -61,9 +61,9 @@ jobs: with: ref: ${{ needs.prepare-release-branch.outputs.release-branch-name }} - - uses: actions/setup-java@v4 + - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: 17 + java-version-file: .java-version distribution: 'temurin' - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 diff --git a/.github/workflows/pr-build.yml b/.github/workflows/pr-build.yml index 14e71745cd..87ecee6357 100644 --- a/.github/workflows/pr-build.yml +++ b/.github/workflows/pr-build.yml @@ -14,9 +14,9 @@ jobs: steps: - uses: actions/checkout@v5 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: 17 + java-version-file: .java-version distribution: temurin # 
vaadin 14 tests fail with node 18 - name: Set up Node @@ -56,9 +56,9 @@ jobs: steps: - uses: actions/checkout@v5 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: 17 + java-version-file: .java-version distribution: temurin - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 @@ -93,7 +93,7 @@ jobs: ./gradlew build -p exporters/aws-distro-opentelemetry-xray-udp-span-exporter - name: Set up Java version for tests - uses: actions/setup-java@v4 + uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: java-version: 23 distribution: temurin @@ -109,9 +109,9 @@ jobs: arguments: contractTests -PlocalDocker=true -i - name: Set up Java version for image build - uses: actions/setup-java@v4 + uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: 17 + java-version-file: .java-version distribution: temurin - name: Get current version @@ -168,9 +168,9 @@ jobs: uses: actions/checkout@v5 - name: Setup Java - uses: actions/setup-java@v4 + uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: 17 + java-version-file: .java-version distribution: temurin - name: Build layer diff --git a/.github/workflows/release-build.yml b/.github/workflows/release-build.yml index b73636e488..41862f5c08 100644 --- a/.github/workflows/release-build.yml +++ b/.github/workflows/release-build.yml @@ -25,9 +25,9 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: 17 + java-version-file: .java-version distribution: 'temurin' - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 diff --git a/.github/workflows/release-lambda.yml b/.github/workflows/release-lambda.yml index e85e9d1ed3..fbd12eb653 
100644 --- a/.github/workflows/release-lambda.yml +++ b/.github/workflows/release-lambda.yml @@ -42,9 +42,9 @@ jobs: - name: Checkout Repo @ SHA - ${{ github.sha }} uses: actions/checkout@v5 - - uses: actions/setup-java@v4 + - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: 17 + java-version-file: .java-version distribution: 'temurin' - name: Build layers diff --git a/.github/workflows/release-udp-exporter.yml b/.github/workflows/release-udp-exporter.yml index 8d39e7308c..c72b8f7a77 100644 --- a/.github/workflows/release-udp-exporter.yml +++ b/.github/workflows/release-udp-exporter.yml @@ -29,9 +29,9 @@ jobs: uses: actions/checkout@v5 - name: Set up Java - uses: actions/setup-java@v4 + uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: '17' + java-version-file: .java-version distribution: 'temurin' cache: 'gradle' diff --git a/.github/workflows/udp-exporter-e2e-test.yml b/.github/workflows/udp-exporter-e2e-test.yml index 33ad3d756a..a3e4631db8 100644 --- a/.github/workflows/udp-exporter-e2e-test.yml +++ b/.github/workflows/udp-exporter-e2e-test.yml @@ -16,9 +16,9 @@ jobs: uses: actions/checkout@v5 - name: Set up Java - uses: actions/setup-java@v4 + uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: - java-version: '17' + java-version-file: .java-version distribution: 'temurin' cache: 'gradle' diff --git a/.java-version b/.java-version new file mode 100644 index 0000000000..aabe6ec390 --- /dev/null +++ b/.java-version @@ -0,0 +1 @@ +21 From a62f47ff02a1d3ee708b681ed755b6557f1d7398 Mon Sep 17 00:00:00 2001 From: "Luke (GuangHui) Zhang" Date: Fri, 22 Aug 2025 15:40:52 -0700 Subject: [PATCH 34/83] fix(deps): update OTel Java Agent to v2.18.1 and OTel Contrib to v1.48.0 (#1156) This updates upstream dependency OTel Java Agent to v2.18.1 and upgrades upstream OTel Contrib to v1.48.0. 
It removes two patch files, as those code has been migrated to upstream. For the OTel Java Contrib patch, it has been replaced by PR-1959 https://github.com/open-telemetry/opentelemetry-java-contrib/pull/1959 , which is included in the OTel Java Contrib v1.48.0 release. For the OTel Java Agent patch for the Lambda stream handler, it has been replaced by PR-13466 https://github.com/open-telemetry/opentelemetry-java-instrumentation/pull/13466 , which is included in the OTel Java Agent v2.18.1 release. Tests: - Unit tests pass: ./gradlew build test - Smoke/Contract tests pass: ./gradlew appsignals-tests:contract-tests:contractTests - X-Ray remote sampling service end-to-end manual tests pass - EC2 end-to-end tests with SpringBoot Java app pass - Lambda end-to-end tests with SpringBoot Java app pass By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- .../patches/opentelemetry-java-contrib.patch | 237 -------- .github/patches/versions | 3 +- .../javaagent/providers/UdpExporterTest.java | 7 +- dependencyManagement/build.gradle.kts | 8 +- instrumentation/aws-sdk/build.gradle.kts | 2 + lambda-layer/build-layer.sh | 2 - .../StreamHandlerInstrumentation.patch | 513 ------------------ .../aws-otel-java-instrumentation.patch | 10 +- .../opentelemetry-java-instrumentation.patch | 12 +- 9 files changed, 23 insertions(+), 771 deletions(-) delete mode 100644 .github/patches/opentelemetry-java-contrib.patch delete mode 100644 lambda-layer/patches/StreamHandlerInstrumentation.patch diff --git a/.github/patches/opentelemetry-java-contrib.patch b/.github/patches/opentelemetry-java-contrib.patch deleted file mode 100644 index 65a6f8b780..0000000000 --- a/.github/patches/opentelemetry-java-contrib.patch +++ /dev/null @@ -1,237 +0,0 @@ -diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplier.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplier.java -index 
1ef8abf..ef84f35 100644 ---- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplier.java -+++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplier.java -@@ -35,6 +35,11 @@ final class SamplingRuleApplier { - - private static final Map XRAY_CLOUD_PLATFORM; - -+ private static final AttributeKey URL_PATH = AttributeKey.stringKey("url.path"); -+ private static final AttributeKey URL_FULL = AttributeKey.stringKey("url.full"); -+ private static final AttributeKey HTTP_REQUEST_METHOD = -+ AttributeKey.stringKey("http.request.method"); -+ - static { - Map xrayCloudPlatform = new HashMap<>(); - xrayCloudPlatform.put(ResourceAttributes.CloudPlatformValues.AWS_EC2, "AWS::EC2::Instance"); -@@ -162,11 +167,14 @@ final class SamplingRuleApplier { - String host = null; - - for (Map.Entry, Object> entry : attributes.asMap().entrySet()) { -- if (entry.getKey().equals(SemanticAttributes.HTTP_TARGET)) { -+ if (entry.getKey().equals(SemanticAttributes.HTTP_TARGET) -+ || entry.getKey().equals(URL_PATH)) { - httpTarget = (String) entry.getValue(); -- } else if (entry.getKey().equals(SemanticAttributes.HTTP_URL)) { -+ } else if (entry.getKey().equals(SemanticAttributes.HTTP_URL) -+ || entry.getKey().equals(URL_FULL)) { - httpUrl = (String) entry.getValue(); -- } else if (entry.getKey().equals(SemanticAttributes.HTTP_METHOD)) { -+ } else if (entry.getKey().equals(SemanticAttributes.HTTP_METHOD) -+ || entry.getKey().equals(HTTP_REQUEST_METHOD)) { - httpMethod = (String) entry.getValue(); - } else if (entry.getKey().equals(SemanticAttributes.NET_HOST_NAME)) { - host = (String) entry.getValue(); -diff --git a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplierTest.java b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplierTest.java -index 6bb6e82..55dabbd 100644 ---- a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplierTest.java -+++ 
b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplierTest.java -@@ -42,6 +42,11 @@ class SamplingRuleApplierTest { - - private static final String CLIENT_ID = "test-client-id"; - -+ private static final AttributeKey URL_PATH = AttributeKey.stringKey("url.path"); -+ private static final AttributeKey URL_FULL = AttributeKey.stringKey("url.full"); -+ private static final AttributeKey HTTP_REQUEST_METHOD = -+ AttributeKey.stringKey("http.request.method"); -+ - @Nested - @SuppressWarnings("ClassCanBeStatic") - class ExactMatch { -@@ -68,6 +73,15 @@ class SamplingRuleApplierTest { - .put(AttributeKey.longKey("speed"), 10) - .build(); - -+ private final Attributes newSemCovAttributes = -+ Attributes.builder() -+ .put(HTTP_REQUEST_METHOD, "GET") -+ .put(SemanticAttributes.NET_HOST_NAME, "opentelemetry.io") -+ .put(URL_PATH, "/instrument-me") -+ .put(AttributeKey.stringKey("animal"), "cat") -+ .put(AttributeKey.longKey("speed"), 10) -+ .build(); -+ - // FixedRate set to 1.0 in rule and no reservoir - @Test - void fixedRateAlwaysSample() { -@@ -116,6 +130,21 @@ class SamplingRuleApplierTest { - .isTrue(); - } - -+ @Test -+ void matchesURLFullNewSemCov() { -+ assertThat(applier.matches(newSemCovAttributes, resource)).isTrue(); -+ -+ // http.url works too -+ assertThat( -+ applier.matches( -+ attributes.toBuilder() -+ .remove(URL_FULL) -+ .put(URL_FULL, "scheme://host:port/instrument-me") -+ .build(), -+ resource)) -+ .isTrue(); -+ } -+ - @Test - void serviceNameNotMatch() { - assertThat( -@@ -137,6 +166,13 @@ class SamplingRuleApplierTest { - assertThat(applier.matches(attributes, resource)).isFalse(); - } - -+ @Test -+ void methodNewSemCovNotMatch() { -+ Attributes attributes = -+ this.newSemCovAttributes.toBuilder().put(HTTP_REQUEST_METHOD, "POST").build(); -+ assertThat(applier.matches(attributes, resource)).isFalse(); -+ } -+ - @Test - void hostNotMatch() { - // Replacing dot with character makes sure we're not accidentally treating dot as regex 
-@@ -178,6 +214,34 @@ class SamplingRuleApplierTest { - assertThat(applier.matches(attributes, resource)).isFalse(); - } - -+ @Test -+ void pathNewSemCovNotMatch() { -+ Attributes attributes = -+ this.newSemCovAttributes.toBuilder().put(URL_PATH, "/instrument-you").build(); -+ assertThat(applier.matches(attributes, resource)).isFalse(); -+ attributes = -+ this.newSemCovAttributes.toBuilder() -+ .remove(URL_PATH) -+ .put(URL_FULL, "scheme://host:port/instrument-you") -+ .build(); -+ assertThat(applier.matches(attributes, resource)).isFalse(); -+ attributes = -+ this.newSemCovAttributes.toBuilder() -+ .remove(URL_PATH) -+ .put(URL_FULL, "scheme://host:port") -+ .build(); -+ assertThat(applier.matches(attributes, resource)).isFalse(); -+ -+ // Correct path, but we ignore anyways since the URL is malformed per spec, scheme is always -+ // present. -+ attributes = -+ this.newSemCovAttributes.toBuilder() -+ .remove(URL_PATH) -+ .put(URL_FULL, "host:port/instrument-me") -+ .build(); -+ assertThat(applier.matches(attributes, resource)).isFalse(); -+ } -+ - @Test - void attributeNotMatch() { - Attributes attributes = -@@ -243,6 +307,15 @@ class SamplingRuleApplierTest { - .put(AttributeKey.longKey("speed"), 10) - .build(); - -+ private final Attributes newSemCovAttributes = -+ Attributes.builder() -+ .put(HTTP_REQUEST_METHOD, "GET") -+ .put(SemanticAttributes.NET_HOST_NAME, "opentelemetry.io") -+ .put(URL_PATH, "/instrument-me?foo=bar&cat=meow") -+ .put(AttributeKey.stringKey("animal"), "cat") -+ .put(AttributeKey.longKey("speed"), 10) -+ .build(); -+ - // FixedRate set to 0.0 in rule and no reservoir - @Test - void fixedRateNeverSample() { -@@ -329,6 +402,26 @@ class SamplingRuleApplierTest { - assertThat(applier.matches(attributes, resource)).isFalse(); - } - -+ @Test -+ void newSemCovMethodMatches() { -+ Attributes attributes = -+ this.newSemCovAttributes.toBuilder().put(HTTP_REQUEST_METHOD, "BADGETGOOD").build(); -+ assertThat(applier.matches(attributes, 
resource)).isTrue(); -+ attributes = newSemCovAttributes.toBuilder().put(HTTP_REQUEST_METHOD, "BADGET").build(); -+ assertThat(applier.matches(attributes, resource)).isTrue(); -+ attributes = newSemCovAttributes.toBuilder().put(HTTP_REQUEST_METHOD, "GETGET").build(); -+ assertThat(applier.matches(attributes, resource)).isTrue(); -+ } -+ -+ @Test -+ void newSemCovMethodNotMatch() { -+ Attributes attributes = -+ newSemCovAttributes.toBuilder().put(HTTP_REQUEST_METHOD, "POST").build(); -+ assertThat(applier.matches(attributes, resource)).isFalse(); -+ attributes = removeAttribute(newSemCovAttributes, HTTP_REQUEST_METHOD); -+ assertThat(applier.matches(attributes, resource)).isFalse(); -+ } -+ - @Test - void hostMatches() { - Attributes attributes = -@@ -410,6 +503,29 @@ class SamplingRuleApplierTest { - assertThat(applier.matches(attributes, resource)).isFalse(); - } - -+ @Test -+ void pathNewSemCovMatches() { -+ Attributes attributes = -+ newSemCovAttributes.toBuilder().put(URL_PATH, "/instrument-me?foo=bar&cat=").build(); -+ assertThat(applier.matches(attributes, resource)).isTrue(); -+ // Deceptive question mark, it's actually a wildcard :-) -+ attributes = -+ newSemCovAttributes.toBuilder().put(URL_PATH, "/instrument-meafoo=bar&cat=").build(); -+ assertThat(applier.matches(attributes, resource)).isTrue(); -+ } -+ -+ @Test -+ void pathNewSemCovNotMatch() { -+ Attributes attributes = -+ newSemCovAttributes.toBuilder().put(URL_PATH, "/instrument-mea?foo=bar&cat=").build(); -+ assertThat(applier.matches(attributes, resource)).isFalse(); -+ attributes = -+ newSemCovAttributes.toBuilder().put(URL_PATH, "foo/instrument-meafoo=bar&cat=").build(); -+ assertThat(applier.matches(attributes, resource)).isFalse(); -+ attributes = removeAttribute(newSemCovAttributes, URL_PATH); -+ assertThat(applier.matches(attributes, resource)).isFalse(); -+ } -+ - @Test - void attributeMatches() { - Attributes attributes = -diff --git a/disk-buffering/build.gradle.kts 
b/disk-buffering/build.gradle.kts -index 041d2e9..e3d60f4 100644 ---- a/disk-buffering/build.gradle.kts -+++ b/disk-buffering/build.gradle.kts -@@ -70,6 +70,10 @@ tasks.named("shadowJar") { - mustRunAfter("jar") - } - -+tasks.withType().configureEach { -+ dependsOn("shadowJar") -+} -+ - // The javadoc from wire's generated classes has errors that make the task that generates the "javadoc" artifact to fail. This - // makes the javadoc task to ignore those generated classes. - tasks.withType(Javadoc::class.java) { -diff --git a/version.gradle.kts b/version.gradle.kts -index acefcee..329b524 100644 ---- a/version.gradle.kts -+++ b/version.gradle.kts -@@ -1,5 +1,5 @@ --val stableVersion = "1.39.0" --val alphaVersion = "1.39.0-alpha" -+val stableVersion = "1.39.0-adot1" -+val alphaVersion = "1.39.0-alpha-adot1" - - allprojects { - if (findProperty("otel.stable") != "true") { diff --git a/.github/patches/versions b/.github/patches/versions index be4e3bbcb6..03f4b3f51f 100644 --- a/.github/patches/versions +++ b/.github/patches/versions @@ -1,2 +1 @@ -OTEL_JAVA_INSTRUMENTATION_VERSION=v2.11.0 -OTEL_JAVA_CONTRIB_VERSION=v1.39.0 \ No newline at end of file +OTEL_JAVA_INSTRUMENTATION_VERSION=v2.18.1 diff --git a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/UdpExporterTest.java b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/UdpExporterTest.java index 2a1cea7106..b070d53bba 100644 --- a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/UdpExporterTest.java +++ b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/UdpExporterTest.java @@ -17,6 +17,7 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.*; import io.opentelemetry.api.common.AttributeKey; @@ -36,8 +37,10 @@ public class 
UdpExporterTest { public void testUdpExporterWithDefaults() { OtlpUdpSpanExporter exporter = new OtlpUdpSpanExporterBuilder().build(); UdpSender sender = exporter.getSender(); - assertThat(sender.getEndpoint().getHostName()) - .isEqualTo("localhost"); // getHostName implicitly converts 127.0.0.1 to localhost + String senderEndpointHostName = sender.getEndpoint().getHostName(); + // getHostName may or may not convert 127.0.0.1 to localhost + assertTrue( + senderEndpointHostName.equals("localhost") || senderEndpointHostName.equals("127.0.0.1")); assertThat(sender.getEndpoint().getPort()).isEqualTo(2000); assertThat(exporter.getPayloadPrefix()).endsWith("T1S"); } diff --git a/dependencyManagement/build.gradle.kts b/dependencyManagement/build.gradle.kts index 11a6441070..d186406009 100644 --- a/dependencyManagement/build.gradle.kts +++ b/dependencyManagement/build.gradle.kts @@ -27,8 +27,8 @@ data class DependencySet(val group: String, val version: String, val modules: Li val testSnapshots = rootProject.findProperty("testUpstreamSnapshots") == "true" // This is the version of the upstream instrumentation BOM -val otelVersion = "2.11.0" -val otelSnapshotVersion = "2.12.0" +val otelVersion = "2.18.1" +val otelSnapshotVersion = "2.19.0" val otelAlphaVersion = if (!testSnapshots) "$otelVersion-alpha" else "$otelSnapshotVersion-alpha-SNAPSHOT" val otelJavaAgentVersion = if (!testSnapshots) otelVersion else "$otelSnapshotVersion-SNAPSHOT" // All versions below are only used in testing and do not affect the released artifact. 
@@ -76,8 +76,8 @@ val dependencyLists = listOf( "commons-logging:commons-logging:1.2", "com.sparkjava:spark-core:2.9.4", "com.squareup.okhttp3:okhttp:4.12.0", - "io.opentelemetry.contrib:opentelemetry-aws-xray:1.39.0-adot1", - "io.opentelemetry.contrib:opentelemetry-aws-resources:1.39.0-alpha", + "io.opentelemetry.contrib:opentelemetry-aws-xray:1.48.0", + "io.opentelemetry.contrib:opentelemetry-aws-resources:1.48.0-alpha", "io.opentelemetry.proto:opentelemetry-proto:1.0.0-alpha", "io.opentelemetry.javaagent:opentelemetry-javaagent:$otelJavaAgentVersion", "io.opentelemetry:opentelemetry-extension-aws:1.20.1", diff --git a/instrumentation/aws-sdk/build.gradle.kts b/instrumentation/aws-sdk/build.gradle.kts index 101e966a12..58fb6b48d7 100644 --- a/instrumentation/aws-sdk/build.gradle.kts +++ b/instrumentation/aws-sdk/build.gradle.kts @@ -41,4 +41,6 @@ dependencies { testImplementation("com.amazonaws:aws-java-sdk-sns:1.11.106") testImplementation("com.amazonaws:aws-java-sdk-stepfunctions:1.11.230") testImplementation("com.amazonaws:aws-java-sdk-secretsmanager:1.11.309") + + testRuntimeOnly("org.junit.platform:junit-platform-launcher") } diff --git a/lambda-layer/build-layer.sh b/lambda-layer/build-layer.sh index 265a367d66..ddd144716d 100755 --- a/lambda-layer/build-layer.sh +++ b/lambda-layer/build-layer.sh @@ -25,8 +25,6 @@ git checkout v${version} -b tag-v${version} # This patch is for Lambda related context propagation patch -p1 < "$SOURCEDIR"/patches/opentelemetry-java-instrumentation.patch -patch -p1 < "$SOURCEDIR"/patches/StreamHandlerInstrumentation.patch - ./gradlew publishToMavenLocal popd rm -rf opentelemetry-java-instrumentation diff --git a/lambda-layer/patches/StreamHandlerInstrumentation.patch b/lambda-layer/patches/StreamHandlerInstrumentation.patch deleted file mode 100644 index c4d4751c89..0000000000 --- a/lambda-layer/patches/StreamHandlerInstrumentation.patch +++ /dev/null @@ -1,513 +0,0 @@ -diff --git 
a/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaInstrumentationModule.java b/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaInstrumentationModule.java -index 35d6b70ed6..b6a305178e 100644 ---- a/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaInstrumentationModule.java -+++ b/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaInstrumentationModule.java -@@ -6,17 +6,18 @@ - package io.opentelemetry.javaagent.instrumentation.awslambdacore.v1_0; - - import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; --import static java.util.Collections.singletonList; - import static net.bytebuddy.matcher.ElementMatchers.not; - - import com.google.auto.service.AutoService; - import io.opentelemetry.javaagent.extension.instrumentation.InstrumentationModule; - import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; -+import java.util.Arrays; - import java.util.List; - import net.bytebuddy.matcher.ElementMatcher; - - @AutoService(InstrumentationModule.class) - public class AwsLambdaInstrumentationModule extends InstrumentationModule { -+ - public AwsLambdaInstrumentationModule() { - super("aws-lambda-core", "aws-lambda-core-1.0", "aws-lambda"); - } -@@ -34,6 +35,8 @@ public class AwsLambdaInstrumentationModule extends InstrumentationModule { - - @Override - public List typeInstrumentations() { -- return singletonList(new AwsLambdaRequestHandlerInstrumentation()); -+ return Arrays.asList( -+ new AwsLambdaRequestHandlerInstrumentation(), -+ new AwsLambdaRequestStreamHandlerInstrumentation()); - } - } -diff --git 
a/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestStreamHandlerInstrumentation.java b/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestStreamHandlerInstrumentation.java -new file mode 100644 -index 0000000000..1c4ef1ac07 ---- /dev/null -+++ b/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestStreamHandlerInstrumentation.java -@@ -0,0 +1,98 @@ -+/* -+ * Copyright The OpenTelemetry Authors -+ * SPDX-License-Identifier: Apache-2.0 -+ */ -+ -+package io.opentelemetry.javaagent.instrumentation.awslambdacore.v1_0; -+ -+import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; -+import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.implementsInterface; -+import static io.opentelemetry.javaagent.instrumentation.awslambdacore.v1_0.AwsLambdaInstrumentationHelper.functionInstrumenter; -+import static net.bytebuddy.matcher.ElementMatchers.isMethod; -+import static net.bytebuddy.matcher.ElementMatchers.isPublic; -+import static net.bytebuddy.matcher.ElementMatchers.nameStartsWith; -+import static net.bytebuddy.matcher.ElementMatchers.named; -+import static net.bytebuddy.matcher.ElementMatchers.not; -+import static net.bytebuddy.matcher.ElementMatchers.takesArgument; -+ -+import com.amazonaws.services.lambda.runtime.Context; -+import io.opentelemetry.context.Scope; -+import io.opentelemetry.instrumentation.awslambdacore.v1_0.AwsLambdaRequest; -+import io.opentelemetry.javaagent.bootstrap.OpenTelemetrySdkAccess; -+import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; -+import io.opentelemetry.javaagent.extension.instrumentation.TypeTransformer; -+import java.io.InputStream; -+import 
java.util.Collections; -+import java.util.concurrent.TimeUnit; -+import net.bytebuddy.asm.Advice; -+import net.bytebuddy.description.type.TypeDescription; -+import net.bytebuddy.implementation.bytecode.assign.Assigner.Typing; -+import net.bytebuddy.matcher.ElementMatcher; -+ -+public class AwsLambdaRequestStreamHandlerInstrumentation implements TypeInstrumentation { -+ -+ @Override -+ public ElementMatcher classLoaderOptimization() { -+ return hasClassesNamed("com.amazonaws.services.lambda.runtime.RequestStreamHandler"); -+ } -+ -+ @Override -+ public ElementMatcher typeMatcher() { -+ return implementsInterface(named("com.amazonaws.services.lambda.runtime.RequestStreamHandler")) -+ .and(not(nameStartsWith("com.amazonaws.services.lambda.runtime.api.client"))) -+ // In Java 8 and Java 11 runtimes, -+ // AWS Lambda runtime is packaged under `lambdainternal` package. -+ // But it is `com.amazonaws.services.lambda.runtime.api.client` -+ // for new runtime likes Java 17 and Java 21. -+ .and(not(nameStartsWith("lambdainternal"))); -+ } -+ -+ @Override -+ public void transform(TypeTransformer transformer) { -+ transformer.applyAdviceToMethod( -+ isMethod() -+ .and(isPublic()) -+ .and(named("handleRequest")) -+ .and(takesArgument(2, named("com.amazonaws.services.lambda.runtime.Context"))), -+ AwsLambdaRequestStreamHandlerInstrumentation.class.getName() + "$HandleRequestAdvice"); -+ } -+ -+ @SuppressWarnings("unused") -+ public static class HandleRequestAdvice { -+ -+ @Advice.OnMethodEnter(suppress = Throwable.class) -+ public static void onEnter( -+ @Advice.Argument(0) InputStream input, -+ @Advice.Argument(2) Context context, -+ @Advice.Local("otelInput") AwsLambdaRequest otelInput, -+ @Advice.Local("otelContext") io.opentelemetry.context.Context otelContext, -+ @Advice.Local("otelScope") Scope otelScope) { -+ -+ otelInput = AwsLambdaRequest.create(context, input, Collections.emptyMap()); -+ io.opentelemetry.context.Context parentContext = 
functionInstrumenter().extract(otelInput); -+ -+ if (!functionInstrumenter().shouldStart(parentContext, otelInput)) { -+ return; -+ } -+ -+ otelContext = functionInstrumenter().start(parentContext, otelInput); -+ otelScope = otelContext.makeCurrent(); -+ } -+ -+ @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class) -+ public static void stopSpan( -+ @Advice.Argument(value = 0, typing = Typing.DYNAMIC) Object arg, -+ @Advice.Thrown Throwable throwable, -+ @Advice.Local("otelInput") AwsLambdaRequest input, -+ @Advice.Local("otelContext") io.opentelemetry.context.Context functionContext, -+ @Advice.Local("otelScope") Scope functionScope) { -+ -+ if (functionScope != null) { -+ functionScope.close(); -+ functionInstrumenter().end(functionContext, input, null, throwable); -+ } -+ -+ OpenTelemetrySdkAccess.forceFlush((long)1, TimeUnit.SECONDS); -+ } -+ } -+} -diff --git a/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaStreamHandlerTest.java b/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaStreamHandlerTest.java -new file mode 100644 -index 0000000000..7bed968d77 ---- /dev/null -+++ b/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaStreamHandlerTest.java -@@ -0,0 +1,113 @@ -+/* -+ * Copyright The OpenTelemetry Authors -+ * SPDX-License-Identifier: Apache-2.0 -+ */ -+ -+package io.opentelemetry.javaagent.instrumentation.awslambdacore.v1_0; -+ -+import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo; -+import static org.assertj.core.api.Assertions.assertThat; -+import static org.assertj.core.api.Assertions.catchThrowable; -+import static org.mockito.Mockito.when; -+ -+import com.amazonaws.services.lambda.runtime.Context; -+import 
com.amazonaws.services.lambda.runtime.RequestStreamHandler; -+import io.opentelemetry.api.trace.SpanKind; -+import io.opentelemetry.instrumentation.testing.junit.AgentInstrumentationExtension; -+import io.opentelemetry.instrumentation.testing.junit.InstrumentationExtension; -+import io.opentelemetry.sdk.trace.data.StatusData; -+import io.opentelemetry.semconv.SemanticAttributes; -+import java.io.BufferedReader; -+import java.io.BufferedWriter; -+import java.io.ByteArrayInputStream; -+import java.io.ByteArrayOutputStream; -+import java.io.IOException; -+import java.io.InputStream; -+import java.io.InputStreamReader; -+import java.io.OutputStream; -+import java.io.OutputStreamWriter; -+import java.nio.charset.StandardCharsets; -+import org.junit.jupiter.api.AfterEach; -+import org.junit.jupiter.api.BeforeEach; -+import org.junit.jupiter.api.Test; -+import org.junit.jupiter.api.extension.ExtendWith; -+import org.junit.jupiter.api.extension.RegisterExtension; -+import org.mockito.Mock; -+import org.mockito.junit.jupiter.MockitoExtension; -+ -+@ExtendWith(MockitoExtension.class) -+public class AwsLambdaStreamHandlerTest { -+ -+ @RegisterExtension -+ public static final InstrumentationExtension testing = AgentInstrumentationExtension.create(); -+ -+ @Mock private Context context; -+ -+ @BeforeEach -+ void setUp() { -+ when(context.getFunctionName()).thenReturn("my_function"); -+ when(context.getAwsRequestId()).thenReturn("1-22-333"); -+ } -+ -+ @AfterEach -+ void tearDown() { -+ assertThat(testing.forceFlushCalled()).isTrue(); -+ } -+ -+ @Test -+ void handlerTraced() throws Exception { -+ InputStream input = new ByteArrayInputStream("hello\n".getBytes(StandardCharsets.UTF_8)); -+ OutputStream output = new ByteArrayOutputStream(); -+ RequestStreamHandlerTestImpl handler = new RequestStreamHandlerTestImpl(); -+ handler.handleRequest(input, output, context); -+ -+ testing.waitAndAssertTraces( -+ trace -> -+ trace.hasSpansSatisfyingExactly( -+ span -> -+ 
span.hasName("my_function") -+ .hasKind(SpanKind.SERVER) -+ .hasAttributesSatisfyingExactly( -+ equalTo(SemanticAttributes.FAAS_INVOCATION_ID, "1-22-333")))); -+ } -+ -+ @Test -+ void handlerTracedWithException() { -+ InputStream input = new ByteArrayInputStream("bye\n".getBytes(StandardCharsets.UTF_8)); -+ OutputStream output = new ByteArrayOutputStream(); -+ RequestStreamHandlerTestImpl handler = new RequestStreamHandlerTestImpl(); -+ -+ Throwable thrown = catchThrowable(() -> handler.handleRequest(input, output, context)); -+ assertThat(thrown).isInstanceOf(IllegalArgumentException.class); -+ -+ testing.waitAndAssertTraces( -+ trace -> -+ trace.hasSpansSatisfyingExactly( -+ span -> -+ span.hasName("my_function") -+ .hasKind(SpanKind.SERVER) -+ .hasStatus(StatusData.error()) -+ .hasException(thrown) -+ .hasAttributesSatisfyingExactly( -+ equalTo(SemanticAttributes.FAAS_INVOCATION_ID, "1-22-333")))); -+ } -+ -+ static final class RequestStreamHandlerTestImpl implements RequestStreamHandler { -+ @Override -+ public void handleRequest(InputStream input, OutputStream output, Context context) -+ throws IOException { -+ BufferedReader reader = -+ new BufferedReader(new InputStreamReader(input, StandardCharsets.UTF_8)); -+ BufferedWriter writer = -+ new BufferedWriter(new OutputStreamWriter(output, StandardCharsets.UTF_8)); -+ String line = reader.readLine(); -+ if (line.equals("hello")) { -+ writer.write("world"); -+ writer.flush(); -+ writer.close(); -+ } else { -+ throw new IllegalArgumentException("bad argument"); -+ } -+ } -+ } -+} -diff --git a/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaInstrumentationModule.java b/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaInstrumentationModule.java -index 9e0e372241..2dd6051c23 100644 ---- 
a/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaInstrumentationModule.java -+++ b/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaInstrumentationModule.java -@@ -6,11 +6,11 @@ - package io.opentelemetry.javaagent.instrumentation.awslambdaevents.v2_2; - - import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; --import static java.util.Collections.singletonList; - - import com.google.auto.service.AutoService; - import io.opentelemetry.javaagent.extension.instrumentation.InstrumentationModule; - import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; -+import java.util.Arrays; - import java.util.List; - import net.bytebuddy.matcher.ElementMatcher; - -@@ -32,6 +32,8 @@ public class AwsLambdaInstrumentationModule extends InstrumentationModule { - - @Override - public List typeInstrumentations() { -- return singletonList(new AwsLambdaRequestHandlerInstrumentation()); -+ return Arrays.asList( -+ new AwsLambdaRequestHandlerInstrumentation(), -+ new AwsLambdaRequestStreamHandlerInstrumentation()); - } - } -diff --git a/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestStreamHandlerInstrumentation.java b/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestStreamHandlerInstrumentation.java -new file mode 100644 -index 0000000000..f21a4a5526 ---- /dev/null -+++ b/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestStreamHandlerInstrumentation.java -@@ -0,0 +1,104 @@ -+/* -+ * Copyright The OpenTelemetry Authors -+ * 
SPDX-License-Identifier: Apache-2.0 -+ */ -+ -+package io.opentelemetry.javaagent.instrumentation.awslambdaevents.v2_2; -+ -+import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.hasClassesNamed; -+import static io.opentelemetry.javaagent.extension.matcher.AgentElementMatchers.implementsInterface; -+import static net.bytebuddy.matcher.ElementMatchers.isMethod; -+import static net.bytebuddy.matcher.ElementMatchers.isPublic; -+import static net.bytebuddy.matcher.ElementMatchers.named; -+import static net.bytebuddy.matcher.ElementMatchers.takesArgument; -+ -+import com.amazonaws.services.lambda.runtime.Context; -+import com.amazonaws.services.lambda.runtime.events.SQSEvent; -+import io.opentelemetry.context.Scope; -+import io.opentelemetry.instrumentation.awslambdacore.v1_0.AwsLambdaRequest; -+import io.opentelemetry.javaagent.bootstrap.OpenTelemetrySdkAccess; -+import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; -+import io.opentelemetry.javaagent.extension.instrumentation.TypeTransformer; -+import java.io.InputStream; -+import java.util.Collections; -+import java.util.concurrent.TimeUnit; -+import net.bytebuddy.asm.Advice; -+import net.bytebuddy.description.type.TypeDescription; -+import net.bytebuddy.implementation.bytecode.assign.Assigner.Typing; -+import net.bytebuddy.matcher.ElementMatcher; -+ -+public class AwsLambdaRequestStreamHandlerInstrumentation implements TypeInstrumentation { -+ -+ @Override -+ public ElementMatcher classLoaderOptimization() { -+ return hasClassesNamed("com.amazonaws.services.lambda.runtime.RequestStreamHandler"); -+ } -+ -+ @Override -+ public ElementMatcher typeMatcher() { -+ return implementsInterface(named("com.amazonaws.services.lambda.runtime.RequestStreamHandler")); -+ } -+ -+ @Override -+ public void transform(TypeTransformer transformer) { -+ transformer.applyAdviceToMethod( -+ isMethod() -+ .and(isPublic()) -+ .and(named("handleRequest")) -+ .and(takesArgument(2, 
named("com.amazonaws.services.lambda.runtime.Context"))), -+ AwsLambdaRequestStreamHandlerInstrumentation.class.getName() + "$HandleRequestAdvice"); -+ } -+ -+ @SuppressWarnings("unused") -+ public static class HandleRequestAdvice { -+ -+ @Advice.OnMethodEnter(suppress = Throwable.class) -+ public static void onEnter( -+ @Advice.Argument(0) InputStream input, -+ @Advice.Argument(2) Context context, -+ @Advice.Local("otelInput") AwsLambdaRequest otelInput, -+ @Advice.Local("otelFunctionContext") io.opentelemetry.context.Context functionContext, -+ @Advice.Local("otelFunctionScope") Scope functionScope, -+ @Advice.Local("otelMessageContext") io.opentelemetry.context.Context messageContext, -+ @Advice.Local("otelMessageScope") Scope messageScope) { -+ otelInput = AwsLambdaRequest.create(context, input, Collections.emptyMap()); -+ io.opentelemetry.context.Context parentContext = -+ AwsLambdaInstrumentationHelper.functionInstrumenter().extract(otelInput); -+ -+ if (!AwsLambdaInstrumentationHelper.functionInstrumenter() -+ .shouldStart(parentContext, otelInput)) { -+ return; -+ } -+ -+ functionContext = -+ AwsLambdaInstrumentationHelper.functionInstrumenter().start(parentContext, otelInput); -+ -+ functionScope = functionContext.makeCurrent(); -+ } -+ -+ @Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class) -+ public static void stopSpan( -+ @Advice.Argument(value = 0, typing = Typing.DYNAMIC) Object arg, -+ @Advice.Thrown Throwable throwable, -+ @Advice.Local("otelInput") AwsLambdaRequest input, -+ @Advice.Local("otelFunctionContext") io.opentelemetry.context.Context functionContext, -+ @Advice.Local("otelFunctionScope") Scope functionScope, -+ @Advice.Local("otelMessageContext") io.opentelemetry.context.Context messageContext, -+ @Advice.Local("otelMessageScope") Scope messageScope) { -+ -+ if (messageScope != null) { -+ messageScope.close(); -+ AwsLambdaInstrumentationHelper.messageInstrumenter() -+ .end(messageContext, (SQSEvent) arg, null, 
throwable); -+ } -+ -+ if (functionScope != null) { -+ functionScope.close(); -+ AwsLambdaInstrumentationHelper.functionInstrumenter() -+ .end(functionContext, input, null, throwable); -+ } -+ -+ OpenTelemetrySdkAccess.forceFlush((long)1, TimeUnit.SECONDS); -+ } -+ } -+} -diff --git a/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaStreamHandlerTest.java b/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaStreamHandlerTest.java -new file mode 100644 -index 0000000000..e30690418d ---- /dev/null -+++ b/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/test/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaStreamHandlerTest.java -@@ -0,0 +1,113 @@ -+/* -+ * Copyright The OpenTelemetry Authors -+ * SPDX-License-Identifier: Apache-2.0 -+ */ -+ -+package io.opentelemetry.javaagent.instrumentation.awslambdaevents.v2_2; -+ -+import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo; -+import static org.assertj.core.api.Assertions.assertThat; -+import static org.assertj.core.api.Assertions.catchThrowable; -+import static org.mockito.Mockito.when; -+ -+import com.amazonaws.services.lambda.runtime.Context; -+import com.amazonaws.services.lambda.runtime.RequestStreamHandler; -+import io.opentelemetry.api.trace.SpanKind; -+import io.opentelemetry.instrumentation.testing.junit.AgentInstrumentationExtension; -+import io.opentelemetry.instrumentation.testing.junit.InstrumentationExtension; -+import io.opentelemetry.sdk.trace.data.StatusData; -+import io.opentelemetry.semconv.SemanticAttributes; -+import java.io.BufferedReader; -+import java.io.BufferedWriter; -+import java.io.ByteArrayInputStream; -+import java.io.ByteArrayOutputStream; -+import java.io.IOException; -+import java.io.InputStream; -+import 
java.io.InputStreamReader; -+import java.io.OutputStream; -+import java.io.OutputStreamWriter; -+import java.nio.charset.StandardCharsets; -+import org.junit.jupiter.api.AfterEach; -+import org.junit.jupiter.api.BeforeEach; -+import org.junit.jupiter.api.Test; -+import org.junit.jupiter.api.extension.ExtendWith; -+import org.junit.jupiter.api.extension.RegisterExtension; -+import org.mockito.Mock; -+import org.mockito.junit.jupiter.MockitoExtension; -+ -+@ExtendWith(MockitoExtension.class) -+public class AwsLambdaStreamHandlerTest { -+ -+ @RegisterExtension -+ public static final InstrumentationExtension testing = AgentInstrumentationExtension.create(); -+ -+ @Mock private Context context; -+ -+ @BeforeEach -+ void setUp() { -+ when(context.getFunctionName()).thenReturn("my_function"); -+ when(context.getAwsRequestId()).thenReturn("1-22-333"); -+ } -+ -+ @AfterEach -+ void tearDown() { -+ assertThat(testing.forceFlushCalled()).isTrue(); -+ } -+ -+ @Test -+ void handlerTraced() throws Exception { -+ InputStream input = new ByteArrayInputStream("hello\n".getBytes(StandardCharsets.UTF_8)); -+ OutputStream output = new ByteArrayOutputStream(); -+ RequestStreamHandlerTestImpl handler = new RequestStreamHandlerTestImpl(); -+ handler.handleRequest(input, output, context); -+ -+ testing.waitAndAssertTraces( -+ trace -> -+ trace.hasSpansSatisfyingExactly( -+ span -> -+ span.hasName("my_function") -+ .hasKind(SpanKind.SERVER) -+ .hasAttributesSatisfyingExactly( -+ equalTo(SemanticAttributes.FAAS_INVOCATION_ID, "1-22-333")))); -+ } -+ -+ @Test -+ void handlerTracedWithException() { -+ InputStream input = new ByteArrayInputStream("bye\n".getBytes(StandardCharsets.UTF_8)); -+ OutputStream output = new ByteArrayOutputStream(); -+ RequestStreamHandlerTestImpl handler = new RequestStreamHandlerTestImpl(); -+ -+ Throwable thrown = catchThrowable(() -> handler.handleRequest(input, output, context)); -+ assertThat(thrown).isInstanceOf(IllegalArgumentException.class); -+ -+ 
testing.waitAndAssertTraces( -+ trace -> -+ trace.hasSpansSatisfyingExactly( -+ span -> -+ span.hasName("my_function") -+ .hasKind(SpanKind.SERVER) -+ .hasStatus(StatusData.error()) -+ .hasException(thrown) -+ .hasAttributesSatisfyingExactly( -+ equalTo(SemanticAttributes.FAAS_INVOCATION_ID, "1-22-333")))); -+ } -+ -+ static final class RequestStreamHandlerTestImpl implements RequestStreamHandler { -+ @Override -+ public void handleRequest(InputStream input, OutputStream output, Context context) -+ throws IOException { -+ BufferedReader reader = -+ new BufferedReader(new InputStreamReader(input, StandardCharsets.UTF_8)); -+ BufferedWriter writer = -+ new BufferedWriter(new OutputStreamWriter(output, StandardCharsets.UTF_8)); -+ String line = reader.readLine(); -+ if (line.equals("hello")) { -+ writer.write("world"); -+ writer.flush(); -+ writer.close(); -+ } else { -+ throw new IllegalArgumentException("bad argument"); -+ } -+ } -+ } -+} diff --git a/lambda-layer/patches/aws-otel-java-instrumentation.patch b/lambda-layer/patches/aws-otel-java-instrumentation.patch index 6b1f5eb9d5..bbd66b64c1 100644 --- a/lambda-layer/patches/aws-otel-java-instrumentation.patch +++ b/lambda-layer/patches/aws-otel-java-instrumentation.patch @@ -1,13 +1,13 @@ diff --git a/dependencyManagement/build.gradle.kts b/dependencyManagement/build.gradle.kts -index 9493189..6090207 100644 +index d186406..91b9386 100644 --- a/dependencyManagement/build.gradle.kts +++ b/dependencyManagement/build.gradle.kts @@ -27,7 +27,7 @@ data class DependencySet(val group: String, val version: String, val modules: Li val testSnapshots = rootProject.findProperty("testUpstreamSnapshots") == "true" - + // This is the version of the upstream instrumentation BOM --val otelVersion = "2.11.0" -+val otelVersion = "2.11.0-adot-lambda1" - val otelSnapshotVersion = "2.12.0" +-val otelVersion = "2.18.1" ++val otelVersion = "2.18.1-adot-lambda1" + val otelSnapshotVersion = "2.19.0" val otelAlphaVersion = if 
(!testSnapshots) "$otelVersion-alpha" else "$otelSnapshotVersion-alpha-SNAPSHOT" val otelJavaAgentVersion = if (!testSnapshots) otelVersion else "$otelSnapshotVersion-SNAPSHOT" diff --git a/lambda-layer/patches/opentelemetry-java-instrumentation.patch b/lambda-layer/patches/opentelemetry-java-instrumentation.patch index a4004e3330..9f4baa1481 100644 --- a/lambda-layer/patches/opentelemetry-java-instrumentation.patch +++ b/lambda-layer/patches/opentelemetry-java-instrumentation.patch @@ -306,14 +306,14 @@ index 4cd11fc0c4..7b7d62755c 100644 } diff --git a/version.gradle.kts b/version.gradle.kts -index 7900c9a4d9..80383d7c22 100644 +index 023d04703c..b267166804 100644 --- a/version.gradle.kts +++ b/version.gradle.kts @@ -1,5 +1,5 @@ --val stableVersion = "2.11.0" --val alphaVersion = "2.11.0-alpha" -+val stableVersion = "2.11.0-adot-lambda1" -+val alphaVersion = "2.11.0-adot-lambda1-alpha" - +-val stableVersion = "2.18.1" +-val alphaVersion = "2.18.1-alpha" ++val stableVersion = "2.18.1-adot-lambda1" ++val alphaVersion = "2.18.1-adot-lambda1-alpha" + allprojects { if (findProperty("otel.stable") != "true") { From 5e035320b9783755b04702d1c2b74ac2b0d69659 Mon Sep 17 00:00:00 2001 From: Mahad Janjua <134644284+majanjua-amzn@users.noreply.github.com> Date: Fri, 29 Aug 2025 16:43:26 -0700 Subject: [PATCH 35/83] Introduce AWS X-Ray Adaptive Sampling support (#1170) --- .../patches/opentelemetry-java-contrib.patch | 3116 +++++++++++++++++ .github/patches/versions | 1 + awsagentprovider/build.gradle.kts | 4 +- .../AttributePropagatingSpanProcessor.java | 1 + .../AwsAgentPropertiesCustomizerProvider.java | 2 +- ...sApplicationSignalsCustomizerProvider.java | 94 +- .../providers/AwsSpanMetricsProcessor.java | 18 +- .../AwsSpanMetricsProcessorBuilder.java | 21 +- ...licationSignalsCustomizerProviderTest.java | 102 + .../AwsSpanMetricsProcessorTest.java | 4 + .../adaptive-sampling-config-invalid.yaml | 13 + .../adaptive-sampling-config-valid.yaml | 12 + 
dependencyManagement/build.gradle.kts | 2 +- .../runner/SpringBootSmokeTest.java | 6 +- 14 files changed, 3379 insertions(+), 17 deletions(-) create mode 100644 .github/patches/opentelemetry-java-contrib.patch create mode 100644 awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsApplicationSignalsCustomizerProviderTest.java create mode 100644 awsagentprovider/src/test/resources/adaptive-sampling-config-invalid.yaml create mode 100644 awsagentprovider/src/test/resources/adaptive-sampling-config-valid.yaml diff --git a/.github/patches/opentelemetry-java-contrib.patch b/.github/patches/opentelemetry-java-contrib.patch new file mode 100644 index 0000000000..718fa85de4 --- /dev/null +++ b/.github/patches/opentelemetry-java-contrib.patch @@ -0,0 +1,3116 @@ +diff --git a/.github/renovate.json5 b/.github/renovate.json5 +index 4f7743a3..9e2082ed 100644 +--- a/.github/renovate.json5 ++++ b/.github/renovate.json5 +@@ -176,5 +176,27 @@ + 'npx (?[^@]+)@(?[^\\s]+)', + ], + }, ++ { ++ customType: 'regex', ++ datasourceTemplate: 'java-version', ++ managerFilePatterns: [ ++ '.github/workflows/**', ++ ], ++ matchStrings: [ ++ '(?\\d+) # renovate: datasource=java-version', ++ ], ++ depNameTemplate: 'java', ++ extractVersionTemplate: '^(?\\d+)', ++ }, ++ { ++ customType: 'regex', ++ datasourceTemplate: 'github-releases', ++ managerFilePatterns: [ ++ '**/build.gradle.kts', ++ ], ++ matchStrings: [ ++ '"https://github.com/(?[^/]+/[^/]+)/zipball/(?.+?)"', ++ ], ++ }, + ], + } +diff --git a/aws-xray/build.gradle.kts b/aws-xray/build.gradle.kts +index 54dabba7..d56b12bd 100644 +--- a/aws-xray/build.gradle.kts ++++ b/aws-xray/build.gradle.kts +@@ -11,6 +11,7 @@ dependencies { + api("io.opentelemetry:opentelemetry-sdk-trace") + + compileOnly("io.opentelemetry:opentelemetry-sdk-extension-autoconfigure") ++ implementation("io.opentelemetry.semconv:opentelemetry-semconv:1.32.0-alpha") + + implementation("com.squareup.okhttp3:okhttp") + 
implementation("io.opentelemetry.semconv:opentelemetry-semconv") +@@ -25,6 +26,7 @@ dependencies { + + implementation("com.fasterxml.jackson.core:jackson-core") + implementation("com.fasterxml.jackson.core:jackson-databind") ++ implementation("com.github.ben-manes.caffeine:caffeine:2.9.3") + + testImplementation("com.linecorp.armeria:armeria-junit5") + testImplementation("io.opentelemetry:opentelemetry-sdk-extension-autoconfigure") +diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsSamplingResult.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsSamplingResult.java +new file mode 100644 +index 00000000..41f22f90 +--- /dev/null ++++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsSamplingResult.java +@@ -0,0 +1,54 @@ ++/* ++ * Copyright The OpenTelemetry Authors ++ * SPDX-License-Identifier: Apache-2.0 ++ */ ++ ++package io.opentelemetry.contrib.awsxray; ++ ++import io.opentelemetry.api.common.Attributes; ++import io.opentelemetry.api.trace.TraceState; ++import io.opentelemetry.sdk.trace.samplers.SamplingDecision; ++import io.opentelemetry.sdk.trace.samplers.SamplingResult; ++ ++final class AwsSamplingResult implements SamplingResult { ++ ++ // OTel trace state is a space shared with other vendors with a 256 character limit ++ // We keep the key and values as short as possible while still identifiable ++ public static final String AWS_XRAY_SAMPLING_RULE_TRACE_STATE_KEY = "xrsr"; ++ ++ private final SamplingDecision decision; ++ private final Attributes attributes; ++ private final String samplingRuleName; ++ ++ private AwsSamplingResult( ++ SamplingDecision decision, Attributes attributes, String samplingRuleName) { ++ this.decision = decision; ++ this.attributes = attributes; ++ this.samplingRuleName = samplingRuleName; ++ } ++ ++ static AwsSamplingResult create( ++ SamplingDecision decision, Attributes attributes, String samplingRuleName) { ++ return new AwsSamplingResult(decision, attributes, samplingRuleName); 
++ } ++ ++ @Override ++ public SamplingDecision getDecision() { ++ return decision; ++ } ++ ++ @Override ++ public Attributes getAttributes() { ++ return attributes; ++ } ++ ++ @Override ++ public TraceState getUpdatedTraceState(TraceState parentTraceState) { ++ if (parentTraceState.get(AWS_XRAY_SAMPLING_RULE_TRACE_STATE_KEY) == null) { ++ return parentTraceState.toBuilder() ++ .put(AWS_XRAY_SAMPLING_RULE_TRACE_STATE_KEY, samplingRuleName) ++ .build(); ++ } ++ return parentTraceState; ++ } ++} +diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayAdaptiveSamplingConfig.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayAdaptiveSamplingConfig.java +new file mode 100644 +index 00000000..dc5b7a01 +--- /dev/null ++++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayAdaptiveSamplingConfig.java +@@ -0,0 +1,148 @@ ++/* ++ * Copyright The OpenTelemetry Authors ++ * SPDX-License-Identifier: Apache-2.0 ++ */ ++ ++package io.opentelemetry.contrib.awsxray; ++ ++import com.fasterxml.jackson.annotation.JsonCreator; ++import com.fasterxml.jackson.annotation.JsonProperty; ++import com.fasterxml.jackson.annotation.JsonValue; ++import com.fasterxml.jackson.databind.annotation.JsonDeserialize; ++import com.fasterxml.jackson.databind.annotation.JsonSerialize; ++import com.google.auto.value.AutoValue; ++import java.util.List; ++import javax.annotation.Nullable; ++ ++@AutoValue ++@JsonSerialize(as = AwsXrayAdaptiveSamplingConfig.class) ++@JsonDeserialize(builder = AutoValue_AwsXrayAdaptiveSamplingConfig.Builder.class) ++public abstract class AwsXrayAdaptiveSamplingConfig { ++ ++ @JsonProperty("version") ++ public abstract double getVersion(); ++ ++ @JsonProperty("anomalyConditions") ++ @Nullable ++ public abstract List getAnomalyConditions(); ++ ++ @JsonProperty("anomalyCaptureLimit") ++ @Nullable ++ public abstract AnomalyCaptureLimit getAnomalyCaptureLimit(); ++ ++ public static Builder builder() { ++ return new 
AutoValue_AwsXrayAdaptiveSamplingConfig.Builder(); ++ } ++ ++ @AutoValue.Builder ++ public abstract static class Builder { ++ @JsonProperty("version") ++ public abstract Builder setVersion(double value); ++ ++ @JsonProperty("anomalyConditions") ++ public abstract Builder setAnomalyConditions(List value); ++ ++ @JsonProperty("anomalyCaptureLimit") ++ public abstract Builder setAnomalyCaptureLimit(AnomalyCaptureLimit value); ++ ++ public abstract AwsXrayAdaptiveSamplingConfig build(); ++ } ++ ++ @AutoValue ++ @JsonDeserialize( ++ builder = AutoValue_AwsXrayAdaptiveSamplingConfig_AnomalyConditions.Builder.class) ++ public abstract static class AnomalyConditions { ++ @JsonProperty("errorCodeRegex") ++ @Nullable ++ public abstract String getErrorCodeRegex(); ++ ++ @JsonProperty("operations") ++ @Nullable ++ public abstract List getOperations(); ++ ++ @JsonProperty("highLatencyMs") ++ @Nullable ++ public abstract Long getHighLatencyMs(); ++ ++ @JsonProperty("usage") ++ @Nullable ++ public abstract UsageType getUsage(); ++ ++ public static Builder builder() { ++ return new AutoValue_AwsXrayAdaptiveSamplingConfig_AnomalyConditions.Builder(); ++ } ++ ++ @AutoValue.Builder ++ public abstract static class Builder { ++ @JsonProperty("errorCodeRegex") ++ public abstract Builder setErrorCodeRegex(String value); ++ ++ @JsonProperty("operations") ++ public abstract Builder setOperations(List value); ++ ++ @JsonProperty("highLatencyMs") ++ public abstract Builder setHighLatencyMs(Long value); ++ ++ @JsonProperty("usage") ++ public abstract Builder setUsage(UsageType value); ++ ++ public abstract AnomalyConditions build(); ++ } ++ } ++ ++ public enum UsageType { ++ BOTH("both"), ++ SAMPLING_BOOST("sampling-boost"), ++ ANOMALY_TRACE_CAPTURE("anomaly-trace-capture"), ++ NEITHER("neither"); // Not meant to be used by customers ++ ++ private final String value; ++ ++ UsageType(String value) { ++ this.value = value; ++ } ++ ++ @JsonValue ++ public String getValue() { ++ return value; ++ 
} ++ ++ @JsonCreator ++ public static UsageType fromValue(String value) { ++ for (UsageType type : values()) { ++ if (type.value.equals(value)) { ++ return type; ++ } ++ } ++ throw new IllegalArgumentException("Invalid usage value: " + value); ++ } ++ ++ public static boolean isUsedForBoost(UsageType usage) { ++ return BOTH.equals(usage) || SAMPLING_BOOST.equals(usage); ++ } ++ ++ public static boolean isUsedForAnomalyTraceCapture(UsageType usage) { ++ return BOTH.equals(usage) || ANOMALY_TRACE_CAPTURE.equals(usage); ++ } ++ } ++ ++ @AutoValue ++ @JsonDeserialize( ++ builder = AutoValue_AwsXrayAdaptiveSamplingConfig_AnomalyCaptureLimit.Builder.class) ++ public abstract static class AnomalyCaptureLimit { ++ @JsonProperty("anomalyTracesPerSecond") ++ public abstract int getAnomalyTracesPerSecond(); ++ ++ public static Builder builder() { ++ return new AutoValue_AwsXrayAdaptiveSamplingConfig_AnomalyCaptureLimit.Builder(); ++ } ++ ++ @AutoValue.Builder ++ public abstract static class Builder { ++ @JsonProperty("anomalyTracesPerSecond") ++ public abstract Builder setAnomalyTracesPerSecond(int value); ++ ++ public abstract AnomalyCaptureLimit build(); ++ } ++ } ++} +diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSampler.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSampler.java +index ad9b72a2..7864f358 100644 +--- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSampler.java ++++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSampler.java +@@ -9,16 +9,22 @@ import io.opentelemetry.api.common.Attributes; + import io.opentelemetry.api.trace.SpanKind; + import io.opentelemetry.context.Context; + import io.opentelemetry.contrib.awsxray.GetSamplingRulesResponse.SamplingRuleRecord; ++import io.opentelemetry.contrib.awsxray.GetSamplingTargetsRequest.SamplingBoostStatisticsDocument; + import 
io.opentelemetry.contrib.awsxray.GetSamplingTargetsRequest.SamplingStatisticsDocument; + import io.opentelemetry.contrib.awsxray.GetSamplingTargetsResponse.SamplingTargetDocument; + import io.opentelemetry.sdk.common.Clock; + import io.opentelemetry.sdk.resources.Resource; ++import io.opentelemetry.sdk.trace.ReadableSpan; + import io.opentelemetry.sdk.trace.data.LinkData; ++import io.opentelemetry.sdk.trace.data.SpanData; ++import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; ++import io.opentelemetry.sdk.trace.export.SpanExporter; + import io.opentelemetry.sdk.trace.samplers.Sampler; + import io.opentelemetry.sdk.trace.samplers.SamplingResult; + import java.io.Closeable; + import java.time.Duration; + import java.time.Instant; ++import java.util.ArrayList; + import java.util.Date; + import java.util.Iterator; + import java.util.List; +@@ -43,6 +49,9 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { + + private static final Logger logger = Logger.getLogger(AwsXrayRemoteSampler.class.getName()); + ++ // Default batch size to be same as OTel BSP default ++ private static final int maxExportBatchSize = 512; ++ + private final Resource resource; + private final Clock clock; + private final Sampler initialSampler; +@@ -59,6 +68,9 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { + @Nullable private volatile XrayRulesSampler internalXrayRulesSampler; + private volatile Sampler sampler; + ++ @Nullable private AwsXrayAdaptiveSamplingConfig adaptiveSamplingConfig; ++ @Nullable private BatchSpanProcessor bsp; ++ + /** + * Returns a {@link AwsXrayRemoteSamplerBuilder} with the given {@link Resource}. This {@link + * Resource} should be the same as what the OpenTelemetry SDK is configured with. 
+@@ -120,6 +132,40 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { + return "AwsXrayRemoteSampler{" + sampler.getDescription() + "}"; + } + ++ public void setAdaptiveSamplingConfig(AwsXrayAdaptiveSamplingConfig config) { ++ if (this.adaptiveSamplingConfig != null) { ++ throw new IllegalStateException("Programming bug - Adaptive sampling config is already set"); ++ } else if (config != null && this.adaptiveSamplingConfig == null) { ++ // Save here and also pass to XrayRulesSampler directly as it already exists ++ this.adaptiveSamplingConfig = config; ++ if (internalXrayRulesSampler != null) { ++ internalXrayRulesSampler.setAdaptiveSamplingConfig(config); ++ } ++ } ++ } ++ ++ public void setSpanExporter(SpanExporter spanExporter) { ++ if (this.bsp != null) { ++ throw new IllegalStateException("Programming bug - BatchSpanProcessor is already set"); ++ } else if (spanExporter != null && this.bsp == null) { ++ this.bsp = ++ BatchSpanProcessor.builder(spanExporter) ++ .setExportUnsampledSpans(true) // Required to capture the unsampled anomaly spans ++ .setMaxExportBatchSize(maxExportBatchSize) ++ .build(); ++ } ++ } ++ ++ public void adaptSampling(ReadableSpan span, SpanData spanData) { ++ if (this.bsp == null) { ++ throw new IllegalStateException( ++ "Programming bug - BatchSpanProcessor is null while trying to adapt sampling"); ++ } ++ if (internalXrayRulesSampler != null) { ++ internalXrayRulesSampler.adaptSampling(span, spanData, this.bsp::onEnd); ++ } ++ } ++ + private void getAndUpdateSampler() { + try { + // No pagination support yet, or possibly ever. 
+@@ -134,8 +180,8 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { + initialSampler, + response.getSamplingRules().stream() + .map(SamplingRuleRecord::getRule) +- .collect(Collectors.toList()))); +- ++ .collect(Collectors.toList()), ++ adaptiveSamplingConfig)); + previousRulesResponse = response; + ScheduledFuture existingFetchTargetsFuture = fetchTargetsFuture; + if (existingFetchTargetsFuture != null) { +@@ -179,14 +225,29 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { + XrayRulesSampler xrayRulesSampler = this.internalXrayRulesSampler; + try { + Date now = Date.from(Instant.ofEpochSecond(0, clock.now())); +- List statistics = xrayRulesSampler.snapshot(now); ++ List statisticsSnapshot = ++ xrayRulesSampler.snapshot(now); ++ List statistics = new ArrayList(); ++ List boostStatistics = ++ new ArrayList(); ++ statisticsSnapshot.stream() ++ .forEach( ++ snapshot -> { ++ if (snapshot.getStatisticsDocument() != null) { ++ statistics.add(snapshot.getStatisticsDocument()); ++ } ++ if (snapshot.getBoostStatisticsDocument() != null ++ && snapshot.getBoostStatisticsDocument().getTotalCount() > 0) { ++ boostStatistics.add(snapshot.getBoostStatisticsDocument()); ++ } ++ }); + Set requestedTargetRuleNames = + statistics.stream() + .map(SamplingStatisticsDocument::getRuleName) + .collect(Collectors.toSet()); + +- GetSamplingTargetsResponse response = +- client.getSamplingTargets(GetSamplingTargetsRequest.create(statistics)); ++ GetSamplingTargetsRequest req = GetSamplingTargetsRequest.create(statistics, boostStatistics); ++ GetSamplingTargetsResponse response = client.getSamplingTargets(req); + Map targets = + response.getDocuments().stream() + .collect(Collectors.toMap(SamplingTargetDocument::getRuleName, Function.identity())); +diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/GetSamplingRulesResponse.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/GetSamplingRulesResponse.java +index 
dca930d5..01835dc2 100644 +--- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/GetSamplingRulesResponse.java ++++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/GetSamplingRulesResponse.java +@@ -62,7 +62,8 @@ abstract class GetSamplingRulesResponse { + @JsonProperty("ServiceName") String serviceName, + @JsonProperty("ServiceType") String serviceType, + @JsonProperty("URLPath") String urlPath, +- @JsonProperty("Version") int version) { ++ @JsonProperty("Version") int version, ++ @JsonProperty("SamplingRateBoost") @Nullable SamplingRateBoost samplingRateBoost) { + return new AutoValue_GetSamplingRulesResponse_SamplingRule( + attributes, + fixedRate, +@@ -76,7 +77,8 @@ abstract class GetSamplingRulesResponse { + serviceName, + serviceType, + urlPath, +- version); ++ version, ++ samplingRateBoost); + } + + abstract Map getAttributes(); +@@ -106,5 +108,23 @@ abstract class GetSamplingRulesResponse { + abstract String getUrlPath(); + + abstract int getVersion(); ++ ++ @Nullable ++ abstract SamplingRateBoost getSamplingRateBoost(); ++ } ++ ++ @AutoValue ++ abstract static class SamplingRateBoost { ++ @JsonCreator ++ static SamplingRateBoost create( ++ @JsonProperty("MaxRate") double maxRate, ++ @JsonProperty("CooldownWindowMinutes") long cooldownWindowMinutes) { ++ return new AutoValue_GetSamplingRulesResponse_SamplingRateBoost( ++ maxRate, cooldownWindowMinutes); ++ } ++ ++ abstract double getMaxRate(); ++ ++ abstract long getCooldownWindowMinutes(); + } + } +diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/GetSamplingTargetsRequest.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/GetSamplingTargetsRequest.java +index 7d1fb7b7..9404f73e 100644 +--- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/GetSamplingTargetsRequest.java ++++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/GetSamplingTargetsRequest.java +@@ -15,14 +15,20 @@ import java.util.List; + @JsonSerialize(as = 
GetSamplingTargetsRequest.class) + abstract class GetSamplingTargetsRequest { + +- static GetSamplingTargetsRequest create(List documents) { +- return new AutoValue_GetSamplingTargetsRequest(documents); ++ static GetSamplingTargetsRequest create( ++ List documents, ++ List boostDocuments) { ++ return new AutoValue_GetSamplingTargetsRequest(documents, boostDocuments); + } + + // Limit of 25 items + @JsonProperty("SamplingStatisticsDocuments") + abstract List getDocuments(); + ++ // Limit of 25 items ++ @JsonProperty("SamplingBoostStatisticsDocuments") ++ abstract List getBoostDocuments(); ++ + @AutoValue + @JsonSerialize(as = SamplingStatisticsDocument.class) + abstract static class SamplingStatisticsDocument { +@@ -66,4 +72,48 @@ abstract class GetSamplingTargetsRequest { + abstract SamplingStatisticsDocument build(); + } + } ++ ++ @AutoValue ++ @JsonSerialize(as = SamplingBoostStatisticsDocument.class) ++ abstract static class SamplingBoostStatisticsDocument { ++ ++ static SamplingBoostStatisticsDocument.Builder newBuilder() { ++ return new AutoValue_GetSamplingTargetsRequest_SamplingBoostStatisticsDocument.Builder(); ++ } ++ ++ @JsonProperty("RuleName") ++ abstract String getRuleName(); ++ ++ @JsonProperty("ServiceName") ++ abstract String getServiceName(); ++ ++ @JsonProperty("Timestamp") ++ abstract Date getTimestamp(); ++ ++ @JsonProperty("AnomalyCount") ++ abstract long getAnomalyCount(); ++ ++ @JsonProperty("TotalCount") ++ abstract long getTotalCount(); ++ ++ @JsonProperty("SampledAnomalyCount") ++ abstract long getSampledAnomalyCount(); ++ ++ @AutoValue.Builder ++ abstract static class Builder { ++ abstract Builder setRuleName(String ruleName); ++ ++ abstract Builder setServiceName(String serviceName); ++ ++ abstract Builder setTimestamp(Date timestamp); ++ ++ abstract Builder setAnomalyCount(long anomalyCount); ++ ++ abstract Builder setTotalCount(long totalCount); ++ ++ abstract Builder setSampledAnomalyCount(long sampledAnomalyCount); ++ ++ abstract 
SamplingBoostStatisticsDocument build(); ++ } ++ } + } +diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/GetSamplingTargetsResponse.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/GetSamplingTargetsResponse.java +index c1e178f5..406f07e2 100644 +--- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/GetSamplingTargetsResponse.java ++++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/GetSamplingTargetsResponse.java +@@ -19,9 +19,11 @@ abstract class GetSamplingTargetsResponse { + static GetSamplingTargetsResponse create( + @JsonProperty("LastRuleModification") Date lastRuleModification, + @JsonProperty("SamplingTargetDocuments") List documents, +- @JsonProperty("UnprocessedStatistics") List unprocessedStatistics) { ++ @JsonProperty("UnprocessedStatistics") List unprocessedStatistics, ++ @JsonProperty("UnprocessedBoostStatistics") @Nullable ++ List unprocessedBoostStatistics) { + return new AutoValue_GetSamplingTargetsResponse( +- lastRuleModification, documents, unprocessedStatistics); ++ lastRuleModification, documents, unprocessedStatistics, unprocessedBoostStatistics); + } + + abstract Date getLastRuleModification(); +@@ -30,6 +32,9 @@ abstract class GetSamplingTargetsResponse { + + abstract List getUnprocessedStatistics(); + ++ @Nullable ++ abstract List getUnprocessedBoostStatistics(); ++ + @AutoValue + abstract static class SamplingTargetDocument { + +@@ -39,9 +44,10 @@ abstract class GetSamplingTargetsResponse { + @JsonProperty("Interval") @Nullable Integer intervalSecs, + @JsonProperty("ReservoirQuota") @Nullable Integer reservoirQuota, + @JsonProperty("ReservoirQuotaTTL") @Nullable Date reservoirQuotaTtl, ++ @JsonProperty("SamplingBoost") @Nullable SamplingBoost samplingBoost, + @JsonProperty("RuleName") String ruleName) { + return new AutoValue_GetSamplingTargetsResponse_SamplingTargetDocument( +- fixedRate, intervalSecs, reservoirQuota, reservoirQuotaTtl, ruleName); ++ fixedRate, intervalSecs, 
reservoirQuota, reservoirQuotaTtl, samplingBoost, ruleName); + } + + abstract double getFixedRate(); +@@ -57,6 +63,9 @@ abstract class GetSamplingTargetsResponse { + @Nullable + abstract Date getReservoirQuotaTtl(); + ++ @Nullable ++ abstract SamplingBoost getSamplingBoost(); ++ + abstract String getRuleName(); + } + +@@ -78,4 +87,18 @@ abstract class GetSamplingTargetsResponse { + + abstract String getRuleName(); + } ++ ++ @AutoValue ++ abstract static class SamplingBoost { ++ @JsonCreator ++ static SamplingBoost create( ++ @JsonProperty("BoostRate") double boostRate, ++ @JsonProperty("BoostRateTTL") Date boostRateTtl) { ++ return new AutoValue_GetSamplingTargetsResponse_SamplingBoost(boostRate, boostRateTtl); ++ } ++ ++ abstract double getBoostRate(); ++ ++ abstract Date getBoostRateTtl(); ++ } + } +diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplier.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplier.java +index 1d97c4ae..6462c7f3 100644 +--- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplier.java ++++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplier.java +@@ -11,10 +11,13 @@ import io.opentelemetry.api.common.AttributeKey; + import io.opentelemetry.api.common.Attributes; + import io.opentelemetry.api.trace.SpanKind; + import io.opentelemetry.context.Context; ++import io.opentelemetry.contrib.awsxray.GetSamplingTargetsRequest.SamplingBoostStatisticsDocument; + import io.opentelemetry.contrib.awsxray.GetSamplingTargetsRequest.SamplingStatisticsDocument; ++import io.opentelemetry.contrib.awsxray.GetSamplingTargetsResponse.SamplingBoost; + import io.opentelemetry.contrib.awsxray.GetSamplingTargetsResponse.SamplingTargetDocument; + import io.opentelemetry.sdk.common.Clock; + import io.opentelemetry.sdk.resources.Resource; ++import io.opentelemetry.sdk.trace.ReadableSpan; + import io.opentelemetry.sdk.trace.data.LinkData; + import 
io.opentelemetry.sdk.trace.samplers.Sampler; + import io.opentelemetry.sdk.trace.samplers.SamplingDecision; +@@ -76,12 +79,20 @@ final class SamplingRuleApplier { + + private final String clientId; + private final String ruleName; ++ private final String serviceName; + private final Clock clock; + private final Sampler reservoirSampler; + private final long reservoirEndTimeNanos; ++ private final double fixedRate; + private final Sampler fixedRateSampler; + private final boolean borrowing; + ++ // Adaptive sampling related configs ++ private final boolean hasBoost; ++ private final double boostedFixedRate; ++ private final Long boostEndTimeNanos; ++ private final Sampler boostedFixedRateSampler; ++ + private final Map attributeMatchers; + private final Matcher urlPathMatcher; + private final Matcher serviceNameMatcher; +@@ -94,7 +105,11 @@ final class SamplingRuleApplier { + + private final long nextSnapshotTimeNanos; + +- SamplingRuleApplier(String clientId, GetSamplingRulesResponse.SamplingRule rule, Clock clock) { ++ SamplingRuleApplier( ++ String clientId, ++ GetSamplingRulesResponse.SamplingRule rule, ++ @Nullable String serviceName, ++ Clock clock) { + this.clientId = clientId; + this.clock = clock; + String ruleName = rule.getRuleName(); +@@ -108,6 +123,8 @@ final class SamplingRuleApplier { + } + this.ruleName = ruleName; + ++ this.serviceName = serviceName == null ? "default" : serviceName; ++ + // We don't have a SamplingTarget so are ready to report a snapshot right away. 
+ nextSnapshotTimeNanos = clock.nanoTime(); + +@@ -124,7 +141,15 @@ final class SamplingRuleApplier { + reservoirSampler = Sampler.alwaysOff(); + borrowing = false; + } +- fixedRateSampler = createFixedRate(rule.getFixedRate()); ++ fixedRate = rule.getFixedRate(); ++ fixedRateSampler = createFixedRate(fixedRate); ++ ++ // Check if the rule has a sampling rate boost option ++ hasBoost = rule.getSamplingRateBoost() != null; ++ ++ boostedFixedRate = fixedRate; ++ boostedFixedRateSampler = createFixedRate(fixedRate); ++ boostEndTimeNanos = clock.nanoTime(); + + if (rule.getAttributes().isEmpty()) { + attributeMatchers = Collections.emptyMap(); +@@ -147,11 +172,16 @@ final class SamplingRuleApplier { + private SamplingRuleApplier( + String clientId, + String ruleName, ++ String serviceName, + Clock clock, + Sampler reservoirSampler, + long reservoirEndTimeNanos, ++ double fixedRate, + Sampler fixedRateSampler, + boolean borrowing, ++ double boostedFixedRate, ++ Long boostEndTimeNanos, ++ boolean hasBoost, + Map attributeMatchers, + Matcher urlPathMatcher, + Matcher serviceNameMatcher, +@@ -163,11 +193,16 @@ final class SamplingRuleApplier { + long nextSnapshotTimeNanos) { + this.clientId = clientId; + this.ruleName = ruleName; ++ this.serviceName = serviceName; + this.clock = clock; + this.reservoirSampler = reservoirSampler; + this.reservoirEndTimeNanos = reservoirEndTimeNanos; ++ this.fixedRate = fixedRate; + this.fixedRateSampler = fixedRateSampler; + this.borrowing = borrowing; ++ this.boostedFixedRate = boostedFixedRate; ++ this.boostEndTimeNanos = boostEndTimeNanos; ++ this.hasBoost = hasBoost; + this.attributeMatchers = attributeMatchers; + this.urlPathMatcher = urlPathMatcher; + this.serviceNameMatcher = serviceNameMatcher; +@@ -177,6 +212,7 @@ final class SamplingRuleApplier { + this.resourceArnMatcher = resourceArnMatcher; + this.statistics = statistics; + this.nextSnapshotTimeNanos = nextSnapshotTimeNanos; ++ this.boostedFixedRateSampler = 
createFixedRate(this.boostedFixedRate); + } + + @SuppressWarnings("deprecation") // TODO +@@ -273,45 +309,84 @@ final class SamplingRuleApplier { + statistics.sampled.increment(); + return result; + } +- result = +- fixedRateSampler.shouldSample( +- parentContext, traceId, name, spanKind, attributes, parentLinks); ++ ++ if (clock.nanoTime() < boostEndTimeNanos) { ++ result = ++ boostedFixedRateSampler.shouldSample( ++ parentContext, traceId, name, spanKind, attributes, parentLinks); ++ } else { ++ result = ++ fixedRateSampler.shouldSample( ++ parentContext, traceId, name, spanKind, attributes, parentLinks); ++ } + if (result.getDecision() != SamplingDecision.DROP) { + statistics.sampled.increment(); + } + return result; + } + ++ void countTrace() { ++ statistics.traces.increment(); ++ } ++ ++ void countAnomalyTrace(ReadableSpan span) { ++ statistics.anomalies.increment(); ++ ++ if (span.getSpanContext().isSampled()) { ++ statistics.anomaliesSampled.increment(); ++ } ++ } ++ + @Nullable +- SamplingStatisticsDocument snapshot(Date now) { ++ SamplingRuleStatisticsSnapshot snapshot(Date now) { + if (clock.nanoTime() < nextSnapshotTimeNanos) { + return null; + } +- return SamplingStatisticsDocument.newBuilder() +- .setClientId(clientId) +- .setRuleName(ruleName) +- .setTimestamp(now) +- // Resetting requests first ensures that sample / borrow rate are positive after the reset. +- // Snapshotting is not concurrent so this ensures they are always positive. 
+- .setRequestCount(statistics.requests.sumThenReset()) +- .setSampledCount(statistics.sampled.sumThenReset()) +- .setBorrowCount(statistics.borrowed.sumThenReset()) +- .build(); ++ long totalCount = statistics.requests.sumThenReset(); ++ long sampledCount = statistics.sampled.sumThenReset(); ++ long borrowCount = statistics.borrowed.sumThenReset(); ++ long traceCount = statistics.traces.sumThenReset(); ++ long anomalyCount = statistics.anomalies.sumThenReset(); ++ long sampledAnomalyCount = statistics.anomaliesSampled.sumThenReset(); ++ SamplingStatisticsDocument samplingStatistics = ++ SamplingStatisticsDocument.newBuilder() ++ .setClientId(clientId) ++ .setRuleName(ruleName) ++ .setTimestamp(now) ++ // Resetting requests first ensures that sample / borrow rate are positive after the ++ // reset. ++ // Snapshotting is not concurrent so this ensures they are always positive. ++ .setRequestCount(totalCount) ++ .setSampledCount(sampledCount) ++ .setBorrowCount(borrowCount) ++ .build(); ++ SamplingBoostStatisticsDocument boostDoc = ++ SamplingBoostStatisticsDocument.newBuilder() ++ .setRuleName(ruleName) ++ .setServiceName(serviceName) ++ .setTimestamp(now) ++ .setTotalCount(traceCount) ++ .setAnomalyCount(anomalyCount) ++ .setSampledAnomalyCount(sampledAnomalyCount) ++ .build(); ++ return new SamplingRuleStatisticsSnapshot(samplingStatistics, boostDoc); + } + + long getNextSnapshotTimeNanos() { + return nextSnapshotTimeNanos; + } + +- SamplingRuleApplier withTarget(SamplingTargetDocument target, Date now) { ++ // currentNanoTime is passed in to ensure all uses of withTarget are used with the same baseline ++ // time reference ++ SamplingRuleApplier withTarget(SamplingTargetDocument target, Date now, long currentNanoTime) { + Sampler newFixedRateSampler = createFixedRate(target.getFixedRate()); + Sampler newReservoirSampler = Sampler.alwaysOff(); +- long newReservoirEndTimeNanos = clock.nanoTime(); ++ long newReservoirEndTimeNanos = currentNanoTime; + // Not well 
documented but a quota should always come with a TTL + if (target.getReservoirQuota() != null && target.getReservoirQuotaTtl() != null) { + newReservoirSampler = createRateLimited(target.getReservoirQuota()); + newReservoirEndTimeNanos = +- clock.nanoTime() ++ currentNanoTime + + Duration.between(now.toInstant(), target.getReservoirQuotaTtl().toInstant()) + .toNanos(); + } +@@ -319,16 +394,36 @@ final class SamplingRuleApplier { + target.getIntervalSecs() != null + ? TimeUnit.SECONDS.toNanos(target.getIntervalSecs()) + : AwsXrayRemoteSampler.DEFAULT_TARGET_INTERVAL_NANOS; +- long newNextSnapshotTimeNanos = clock.nanoTime() + intervalNanos; ++ long newNextSnapshotTimeNanos = currentNanoTime + intervalNanos; ++ ++ double newBoostedFixedRate = fixedRate; ++ long newBoostEndTimeNanos = currentNanoTime; ++ if (target.getSamplingBoost() != null) { ++ SamplingBoost samplingBoostMap = target.getSamplingBoost(); ++ if (samplingBoostMap != null ++ && samplingBoostMap.getBoostRate() >= target.getFixedRate() ++ && samplingBoostMap.getBoostRateTtl() != null) { ++ newBoostedFixedRate = samplingBoostMap.getBoostRate(); ++ newBoostEndTimeNanos = ++ currentNanoTime ++ + Duration.between(now.toInstant(), samplingBoostMap.getBoostRateTtl().toInstant()) ++ .toNanos(); ++ } ++ } + + return new SamplingRuleApplier( + clientId, + ruleName, ++ serviceName, + clock, + newReservoirSampler, + newReservoirEndTimeNanos, ++ fixedRate, + newFixedRateSampler, + /* borrowing= */ false, ++ newBoostedFixedRate, ++ newBoostEndTimeNanos, ++ hasBoost, + attributeMatchers, + urlPathMatcher, + serviceNameMatcher, +@@ -344,11 +439,16 @@ final class SamplingRuleApplier { + return new SamplingRuleApplier( + clientId, + ruleName, ++ serviceName, + clock, + reservoirSampler, + reservoirEndTimeNanos, ++ fixedRate, + fixedRateSampler, + borrowing, ++ boostedFixedRate, ++ boostEndTimeNanos, ++ hasBoost, + attributeMatchers, + urlPathMatcher, + serviceNameMatcher, +@@ -364,6 +464,15 @@ final class 
SamplingRuleApplier { + return ruleName; + } + ++ // For testing ++ String getServiceName() { ++ return serviceName; ++ } ++ ++ boolean hasBoost() { ++ return hasBoost; ++ } ++ + @Nullable + private static String getArn(Attributes attributes, Resource resource) { + String arn = resource.getAttributes().get(AWS_ECS_CONTAINER_ARN); +@@ -515,5 +624,30 @@ final class SamplingRuleApplier { + final LongAdder requests = new LongAdder(); + final LongAdder sampled = new LongAdder(); + final LongAdder borrowed = new LongAdder(); ++ final LongAdder traces = new LongAdder(); ++ final LongAdder anomalies = new LongAdder(); ++ final LongAdder anomaliesSampled = new LongAdder(); ++ } ++ ++ static class SamplingRuleStatisticsSnapshot { ++ final SamplingStatisticsDocument statisticsDocument; ++ final SamplingBoostStatisticsDocument boostStatisticsDocument; ++ ++ // final SamplingBoostStatisticsDocument boostStatisticsDocument; ++ ++ SamplingRuleStatisticsSnapshot( ++ SamplingStatisticsDocument statisticsDocument, ++ SamplingBoostStatisticsDocument boostStatisticsDocument) { ++ this.statisticsDocument = statisticsDocument; ++ this.boostStatisticsDocument = boostStatisticsDocument; ++ } ++ ++ SamplingStatisticsDocument getStatisticsDocument() { ++ return statisticsDocument; ++ } ++ ++ SamplingBoostStatisticsDocument getBoostStatisticsDocument() { ++ return boostStatisticsDocument; ++ } + } + } +diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/XrayRulesSampler.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/XrayRulesSampler.java +index 75977dc0..9620ba2b 100644 +--- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/XrayRulesSampler.java ++++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/XrayRulesSampler.java +@@ -5,42 +5,79 @@ + + package io.opentelemetry.contrib.awsxray; + ++import static io.opentelemetry.semconv.HttpAttributes.HTTP_RESPONSE_STATUS_CODE; ++import static io.opentelemetry.semconv.ServiceAttributes.SERVICE_NAME; ++ 
++import com.github.benmanes.caffeine.cache.Cache; ++import com.github.benmanes.caffeine.cache.Caffeine; ++import io.opentelemetry.api.common.AttributeKey; + import io.opentelemetry.api.common.Attributes; ++import io.opentelemetry.api.trace.Span; + import io.opentelemetry.api.trace.SpanKind; ++import io.opentelemetry.api.trace.StatusCode; + import io.opentelemetry.context.Context; + import io.opentelemetry.contrib.awsxray.GetSamplingTargetsResponse.SamplingTargetDocument; + import io.opentelemetry.sdk.common.Clock; + import io.opentelemetry.sdk.resources.Resource; ++import io.opentelemetry.sdk.trace.ReadableSpan; + import io.opentelemetry.sdk.trace.data.LinkData; ++import io.opentelemetry.sdk.trace.data.SpanData; + import io.opentelemetry.sdk.trace.samplers.Sampler; + import io.opentelemetry.sdk.trace.samplers.SamplingResult; ++import java.nio.charset.StandardCharsets; ++import java.security.MessageDigest; ++import java.security.NoSuchAlgorithmException; ++import java.time.Duration; + import java.util.Arrays; + import java.util.Comparator; + import java.util.Date; ++import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.Objects; + import java.util.Set; ++import java.util.function.Consumer; + import java.util.logging.Level; + import java.util.logging.Logger; + import java.util.stream.Collectors; ++import javax.annotation.Nullable; + + final class XrayRulesSampler implements Sampler { + + private static final Logger logger = Logger.getLogger(XrayRulesSampler.class.getName()); + ++ public static final AttributeKey AWS_XRAY_SAMPLING_RULE = ++ AttributeKey.stringKey("aws.xray.sampling_rule"); ++ ++ // Used for generating operation ++ private static final String UNKNOWN_OPERATION = "UnknownOperation"; ++ private static final AttributeKey URL_PATH = AttributeKey.stringKey("url.path"); ++ private static final AttributeKey HTTP_TARGET = AttributeKey.stringKey("http.target"); ++ private static final AttributeKey HTTP_REQUEST_METHOD = ++ 
AttributeKey.stringKey("http.request.method"); ++ private static final AttributeKey HTTP_METHOD = AttributeKey.stringKey("http.method"); ++ + private final String clientId; + private final Resource resource; + private final Clock clock; + private final Sampler fallbackSampler; + private final SamplingRuleApplier[] ruleAppliers; ++ private final Map ruleToHashMap; ++ private final Map hashToRuleMap; ++ ++ private final boolean adaptiveSamplingRuleExists; ++ private final Cache traceUsageCache; ++ ++ @Nullable private AwsXrayAdaptiveSamplingConfig adaptiveSamplingConfig; ++ @Nullable private RateLimiter anomalyCaptureRateLimiter; + + XrayRulesSampler( + String clientId, + Resource resource, + Clock clock, + Sampler fallbackSampler, +- List rules) { ++ List rules, ++ @Nullable AwsXrayAdaptiveSamplingConfig adaptiveSamplingConfig) { + this( + clientId, + resource, +@@ -49,8 +86,19 @@ final class XrayRulesSampler implements Sampler { + rules.stream() + // Lower priority value takes precedence so normal ascending sort. 
+ .sorted(Comparator.comparingInt(GetSamplingRulesResponse.SamplingRule::getPriority)) +- .map(rule -> new SamplingRuleApplier(clientId, rule, clock)) +- .toArray(SamplingRuleApplier[]::new)); ++ .map( ++ rule -> ++ new SamplingRuleApplier( ++ clientId, rule, resource.getAttribute(SERVICE_NAME), clock)) ++ .toArray(SamplingRuleApplier[]::new), ++ createRuleHashMaps(rules), ++ rules.stream().anyMatch(r -> r.getSamplingRateBoost() != null), ++ adaptiveSamplingConfig, ++ Caffeine.newBuilder() ++ .maximumSize(100_000) ++ .ticker(clock::nanoTime) ++ .expireAfterWrite(Duration.ofMinutes(10)) ++ .build()); + } + + private XrayRulesSampler( +@@ -58,12 +106,36 @@ final class XrayRulesSampler implements Sampler { + Resource resource, + Clock clock, + Sampler fallbackSampler, +- SamplingRuleApplier[] ruleAppliers) { ++ SamplingRuleApplier[] ruleAppliers, ++ Map ruleToHashMap, ++ boolean adaptiveSamplingRuleExists, ++ @Nullable AwsXrayAdaptiveSamplingConfig adaptiveSamplingConfig, ++ Cache traceUsageCache) { + this.clientId = clientId; + this.resource = resource; + this.clock = clock; + this.fallbackSampler = fallbackSampler; + this.ruleAppliers = ruleAppliers; ++ this.ruleToHashMap = ruleToHashMap; ++ this.hashToRuleMap = new HashMap<>(); ++ for (Map.Entry entry : ruleToHashMap.entrySet()) { ++ this.hashToRuleMap.put(entry.getValue(), entry.getKey()); ++ } ++ this.adaptiveSamplingRuleExists = adaptiveSamplingRuleExists; ++ this.adaptiveSamplingConfig = adaptiveSamplingConfig; ++ this.traceUsageCache = traceUsageCache; ++ ++ // Initialize anomaly capture rate limiter ++ if (this.adaptiveSamplingConfig != null ++ && this.adaptiveSamplingConfig.getAnomalyCaptureLimit() == null) { ++ this.anomalyCaptureRateLimiter = new RateLimiter(1, 1, clock); ++ } else if (adaptiveSamplingConfig != null ++ && adaptiveSamplingConfig.getAnomalyCaptureLimit() != null) { ++ int anomalyTracesPerSecond = ++ adaptiveSamplingConfig.getAnomalyCaptureLimit().getAnomalyTracesPerSecond(); ++ 
this.anomalyCaptureRateLimiter = ++ new RateLimiter(anomalyTracesPerSecond, anomalyTracesPerSecond, clock); ++ } + } + + @Override +@@ -74,10 +146,36 @@ final class XrayRulesSampler implements Sampler { + SpanKind spanKind, + Attributes attributes, + List parentLinks) { ++ String upstreamMatchedRule = ++ Span.fromContext(parentContext) ++ .getSpanContext() ++ .getTraceState() ++ .get(AwsSamplingResult.AWS_XRAY_SAMPLING_RULE_TRACE_STATE_KEY); + for (SamplingRuleApplier applier : ruleAppliers) { + if (applier.matches(attributes, resource)) { +- return applier.shouldSample( +- parentContext, traceId, name, spanKind, attributes, parentLinks); ++ SamplingResult result = ++ applier.shouldSample(parentContext, traceId, name, spanKind, attributes, parentLinks); ++ ++ // If the trace state has a sampling rule reference, propagate it ++ // Otherwise, encode and propagate the matched sampling rule using AwsSamplingResult ++ String ruleToPropagate; ++ if (upstreamMatchedRule != null) { ++ ruleToPropagate = hashToRuleMap.getOrDefault(upstreamMatchedRule, applier.getRuleName()); ++ } else { ++ ruleToPropagate = applier.getRuleName(); ++ } ++ String hashedRule = ruleToHashMap.getOrDefault(ruleToPropagate, ruleToPropagate); ++ if (this.adaptiveSamplingConfig != null ++ && this.adaptiveSamplingConfig.getAnomalyCaptureLimit() != null) { ++ // If the span is capturable based on local SDK config, add sampling rule attribute ++ return AwsSamplingResult.create( ++ result.getDecision(), ++ result.getAttributes().toBuilder() ++ .put(AWS_XRAY_SAMPLING_RULE.getKey(), ruleToPropagate) ++ .build(), ++ hashedRule); ++ } ++ return AwsSamplingResult.create(result.getDecision(), result.getAttributes(), hashedRule); + } + } + +@@ -96,7 +194,184 @@ final class XrayRulesSampler implements Sampler { + return "XrayRulesSampler{" + Arrays.toString(ruleAppliers) + "}"; + } + +- List snapshot(Date now) { ++ void setAdaptiveSamplingConfig(AwsXrayAdaptiveSamplingConfig config) { ++ if 
(this.adaptiveSamplingConfig != null) { ++ throw new IllegalStateException("Programming bug - Adaptive sampling config is already set"); ++ } else if (config != null && this.adaptiveSamplingConfig == null) { ++ this.adaptiveSamplingConfig = config; ++ ++ // Initialize anomaly capture rate limiter if error capture limit is configured ++ if (config.getAnomalyCaptureLimit() != null) { ++ int anomalyTracesPerSecond = config.getAnomalyCaptureLimit().getAnomalyTracesPerSecond(); ++ this.anomalyCaptureRateLimiter = ++ new RateLimiter(anomalyTracesPerSecond, anomalyTracesPerSecond, clock); ++ } ++ } ++ } ++ ++ void adaptSampling(ReadableSpan span, SpanData spanData, Consumer spanBatcher) { ++ if (!adaptiveSamplingRuleExists && this.adaptiveSamplingConfig == null) { ++ return; ++ } ++ Long statusCode = spanData.getAttributes().get(HTTP_RESPONSE_STATUS_CODE); ++ ++ boolean shouldBoostSampling = false; ++ boolean shouldCaptureAnomalySpan = false; ++ ++ List anomalyConditions = ++ adaptiveSamplingConfig != null ? 
adaptiveSamplingConfig.getAnomalyConditions() : null; ++ // Empty list -> no conditions will apply and we will not do anything ++ if (anomalyConditions != null && !anomalyConditions.isEmpty()) { ++ String operation = spanData.getAttributes().get(AwsAttributeKeys.AWS_LOCAL_OPERATION); ++ if (operation == null) { ++ operation = generateIngressOperation(spanData); ++ } ++ for (AwsXrayAdaptiveSamplingConfig.AnomalyConditions condition : anomalyConditions) { ++ // Skip condition if it would only re-apply action already being taken ++ if ((shouldBoostSampling ++ && AwsXrayAdaptiveSamplingConfig.UsageType.SAMPLING_BOOST.equals( ++ condition.getUsage())) ++ || (shouldCaptureAnomalySpan ++ && AwsXrayAdaptiveSamplingConfig.UsageType.ANOMALY_TRACE_CAPTURE.equals( ++ condition.getUsage()))) { ++ continue; ++ } ++ // Check if the operation matches any in the list or if operations list is null (match all) ++ List operations = condition.getOperations(); ++ if (!(operations == null || operations.isEmpty() || operations.contains(operation))) { ++ continue; ++ } ++ // Check if any anomalyConditions detect an anomaly either through error code or latency ++ boolean isAnomaly = false; ++ ++ String errorCodeRegex = condition.getErrorCodeRegex(); ++ if (statusCode != null && errorCodeRegex != null) { ++ isAnomaly = statusCode.toString().matches(errorCodeRegex); ++ } ++ ++ Long highLatencyMs = condition.getHighLatencyMs(); ++ if (highLatencyMs != null) { ++ isAnomaly = ++ (errorCodeRegex == null || isAnomaly) ++ && (span.getLatencyNanos() / 1_000_000.0) >= highLatencyMs; ++ } ++ ++ if (isAnomaly) { ++ AwsXrayAdaptiveSamplingConfig.UsageType usage = condition.getUsage(); ++ if (usage != null) { ++ switch (usage) { ++ case BOTH: ++ shouldBoostSampling = true; ++ shouldCaptureAnomalySpan = true; ++ break; ++ case SAMPLING_BOOST: ++ shouldBoostSampling = true; ++ break; ++ case ANOMALY_TRACE_CAPTURE: ++ shouldCaptureAnomalySpan = true; ++ break; ++ default: // do nothing ++ } ++ } else { ++ 
shouldBoostSampling = true; ++ shouldCaptureAnomalySpan = true; ++ } ++ } ++ if (shouldBoostSampling && shouldCaptureAnomalySpan) { ++ break; ++ } ++ } ++ } else if ((statusCode != null && statusCode > 499) ++ || (statusCode == null ++ && spanData.getStatus() != null ++ && StatusCode.ERROR.equals(spanData.getStatus().getStatusCode()))) { ++ shouldBoostSampling = true; ++ shouldCaptureAnomalySpan = true; ++ } ++ ++ String traceId = spanData.getTraceId(); ++ AwsXrayAdaptiveSamplingConfig.UsageType existingUsage = traceUsageCache.getIfPresent(traceId); ++ boolean isNewTrace = existingUsage == null; ++ ++ // Anomaly Capture ++ boolean isSpanCaptured = false; ++ if (AwsXrayAdaptiveSamplingConfig.UsageType.isUsedForAnomalyTraceCapture(existingUsage) ++ || (shouldCaptureAnomalySpan ++ && !span.getSpanContext().isSampled() ++ && anomalyCaptureRateLimiter != null ++ && anomalyCaptureRateLimiter.trySpend(1))) { ++ spanBatcher.accept(span); ++ isSpanCaptured = true; ++ } ++ ++ // Sampling Boost ++ boolean isCountedAsAnomalyForBoost = false; ++ if (shouldBoostSampling || isNewTrace) { ++ String traceStateValue = ++ span.getSpanContext() ++ .getTraceState() ++ .get(AwsSamplingResult.AWS_XRAY_SAMPLING_RULE_TRACE_STATE_KEY); ++ String ruleNameForBoostStats = ++ traceStateValue != null ++ ? hashToRuleMap.getOrDefault(traceStateValue, traceStateValue) ++ : traceStateValue; ++ SamplingRuleApplier ruleToReportTo = null; ++ SamplingRuleApplier matchedRule = null; ++ for (SamplingRuleApplier applier : ruleAppliers) { ++ // Rule propagated from when sampling decision was made, otherwise the matched rule ++ if (applier.getRuleName().equals(ruleNameForBoostStats)) { ++ ruleToReportTo = applier; ++ break; ++ } ++ if (applier.matches(spanData.getAttributes(), resource)) { ++ matchedRule = applier; ++ } ++ } ++ if (ruleToReportTo == null) { ++ if (matchedRule == null) { ++ logger.log( ++ Level.FINE, ++ "No sampling rule matched the request. 
This is a bug in either the OpenTelemetry SDK or X-Ray."); ++ } else { ++ ruleToReportTo = matchedRule; ++ } ++ } ++ if (shouldBoostSampling ++ && ruleToReportTo != null ++ && ruleToReportTo.hasBoost() ++ && !AwsXrayAdaptiveSamplingConfig.UsageType.isUsedForBoost(existingUsage)) { ++ ruleToReportTo.countAnomalyTrace(span); ++ isCountedAsAnomalyForBoost = true; ++ } ++ if (isNewTrace && ruleToReportTo != null && ruleToReportTo.hasBoost()) { ++ ruleToReportTo.countTrace(); ++ } ++ } ++ ++ // Any interaction with a cache entry will reset the expiration timer of that entry ++ if (isSpanCaptured && isCountedAsAnomalyForBoost) { ++ this.traceUsageCache.put(traceId, AwsXrayAdaptiveSamplingConfig.UsageType.BOTH); ++ } else if (isSpanCaptured) { ++ if (AwsXrayAdaptiveSamplingConfig.UsageType.isUsedForBoost(existingUsage)) { ++ this.traceUsageCache.put(traceId, AwsXrayAdaptiveSamplingConfig.UsageType.BOTH); ++ } else { ++ this.traceUsageCache.put( ++ traceId, AwsXrayAdaptiveSamplingConfig.UsageType.ANOMALY_TRACE_CAPTURE); ++ } ++ } else if (isCountedAsAnomalyForBoost) { ++ if (AwsXrayAdaptiveSamplingConfig.UsageType.isUsedForAnomalyTraceCapture(existingUsage)) { ++ this.traceUsageCache.put(traceId, AwsXrayAdaptiveSamplingConfig.UsageType.BOTH); ++ } else { ++ this.traceUsageCache.put(traceId, AwsXrayAdaptiveSamplingConfig.UsageType.SAMPLING_BOOST); ++ } ++ } else if (existingUsage != null) { ++ this.traceUsageCache.put(traceId, existingUsage); ++ } else { ++ this.traceUsageCache.put(traceId, AwsXrayAdaptiveSamplingConfig.UsageType.NEITHER); ++ } ++ } ++ ++ List snapshot(Date now) { + return Arrays.stream(ruleAppliers) + .map(rule -> rule.snapshot(now)) + .filter(Objects::nonNull) +@@ -115,15 +390,16 @@ final class XrayRulesSampler implements Sampler { + Map ruleTargets, + Set requestedTargetRuleNames, + Date now) { ++ long currentNanoTime = clock.nanoTime(); + long defaultNextSnapshotTimeNanos = +- clock.nanoTime() + AwsXrayRemoteSampler.DEFAULT_TARGET_INTERVAL_NANOS; ++ 
currentNanoTime + AwsXrayRemoteSampler.DEFAULT_TARGET_INTERVAL_NANOS; + SamplingRuleApplier[] newAppliers = + Arrays.stream(ruleAppliers) + .map( + rule -> { + SamplingTargetDocument target = ruleTargets.get(rule.getRuleName()); + if (target != null) { +- return rule.withTarget(target, now); ++ return rule.withTarget(target, now, currentNanoTime); + } + if (requestedTargetRuleNames.contains(rule.getRuleName())) { + // In practice X-Ray should return a target for any rule we requested but +@@ -135,6 +411,92 @@ final class XrayRulesSampler implements Sampler { + return rule; + }) + .toArray(SamplingRuleApplier[]::new); +- return new XrayRulesSampler(clientId, resource, clock, fallbackSampler, newAppliers); ++ return new XrayRulesSampler( ++ clientId, ++ resource, ++ clock, ++ fallbackSampler, ++ newAppliers, ++ ruleToHashMap, ++ adaptiveSamplingRuleExists, ++ adaptiveSamplingConfig, ++ traceUsageCache); ++ } ++ ++ static boolean isKeyPresent(SpanData span, AttributeKey key) { ++ return span.getAttributes().get(key) != null; ++ } ++ ++ private static String generateIngressOperation(SpanData span) { ++ String operation = UNKNOWN_OPERATION; ++ if (isKeyPresent(span, URL_PATH) || isKeyPresent(span, HTTP_TARGET)) { ++ String httpTarget = ++ isKeyPresent(span, URL_PATH) ++ ? span.getAttributes().get(URL_PATH) ++ : span.getAttributes().get(HTTP_TARGET); ++ // get the first part from API path string as operation value ++ // the more levels/parts we get from API path the higher chance for getting high cardinality ++ // data ++ if (httpTarget != null) { ++ operation = extractApiPathValue(httpTarget); ++ if (isKeyPresent(span, HTTP_REQUEST_METHOD) || isKeyPresent(span, HTTP_METHOD)) { ++ String httpMethod = ++ isKeyPresent(span, HTTP_REQUEST_METHOD) ++ ? 
span.getAttributes().get(HTTP_REQUEST_METHOD) ++ : span.getAttributes().get(HTTP_METHOD); ++ if (httpMethod != null) { ++ operation = httpMethod + " " + operation; ++ } ++ } ++ } ++ } ++ return operation; ++ } ++ ++ private static String extractApiPathValue(String httpTarget) { ++ if (httpTarget == null || httpTarget.isEmpty()) { ++ return "/"; ++ } ++ String[] paths = httpTarget.split("/"); ++ if (paths.length > 1) { ++ return "/" + paths[1]; ++ } ++ return "/"; ++ } ++ ++ private static Map createRuleHashMaps( ++ List rules) { ++ Map ruleToHashMap = new HashMap<>(); ++ for (GetSamplingRulesResponse.SamplingRule rule : rules) { ++ String ruleName = rule.getRuleName(); ++ if (ruleName != null) { ++ ruleToHashMap.put(ruleName, hashRuleName(ruleName)); ++ } ++ } ++ return ruleToHashMap; ++ } ++ ++ static String hashRuleName(String ruleName) { ++ try { ++ MessageDigest digest = MessageDigest.getInstance("SHA-256"); ++ byte[] hash = digest.digest(ruleName.getBytes(StandardCharsets.UTF_8)); ++ StringBuilder hexString = new StringBuilder(); ++ for (int i = 0; i < Math.min(hash.length, 8); i++) { ++ String hex = Integer.toHexString(0xff & hash[i]); ++ if (hex.length() == 1) { ++ hexString.append('0'); ++ } ++ hexString.append(hex); ++ } ++ return hexString.toString(); ++ } catch (NoSuchAlgorithmException e) { ++ return ruleName; ++ } ++ } ++ ++ // For testing ++ Cache getTraceUsageCache() { ++ traceUsageCache.cleanUp(); ++ return traceUsageCache; + } + } +diff --git a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSamplerTest.java b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSamplerTest.java +index 4e5cd13b..ec256fe0 100644 +--- a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSamplerTest.java ++++ b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSamplerTest.java +@@ -7,7 +7,10 @@ package io.opentelemetry.contrib.awsxray; + + import static java.util.Objects.requireNonNull; + import 
static org.assertj.core.api.Assertions.assertThat; ++import static org.assertj.core.api.AssertionsForClassTypes.assertThatCode; + import static org.awaitility.Awaitility.await; ++import static org.junit.jupiter.api.Assertions.assertThrows; ++import static org.mockito.Mockito.mock; + + import com.google.common.io.ByteStreams; + import com.linecorp.armeria.common.HttpResponse; +@@ -21,6 +24,9 @@ import io.opentelemetry.api.trace.SpanKind; + import io.opentelemetry.api.trace.TraceId; + import io.opentelemetry.context.Context; + import io.opentelemetry.sdk.resources.Resource; ++import io.opentelemetry.sdk.trace.ReadableSpan; ++import io.opentelemetry.sdk.trace.data.SpanData; ++import io.opentelemetry.sdk.trace.export.SpanExporter; + import io.opentelemetry.sdk.trace.samplers.Sampler; + import io.opentelemetry.sdk.trace.samplers.SamplingDecision; + import java.io.IOException; +@@ -187,6 +193,31 @@ class AwsXrayRemoteSamplerTest { + } + } + ++ void setAndResetSpanExporter() { ++ try (AwsXrayRemoteSampler sampler = AwsXrayRemoteSampler.newBuilder(Resource.empty()).build()) { ++ // Setting span exporter should only work once ++ sampler.setSpanExporter(mock(SpanExporter.class)); ++ assertThrows( ++ IllegalStateException.class, () -> sampler.setSpanExporter(mock(SpanExporter.class))); ++ } ++ } ++ ++ @Test ++ void adaptSamplingWithoutSpanExporter() { ++ assertThrows( ++ IllegalStateException.class, ++ () -> sampler.adaptSampling(mock(ReadableSpan.class), mock(SpanData.class))); ++ } ++ ++ @Test ++ void adaptSamplingWithSpanExporter() { ++ try (AwsXrayRemoteSampler sampler = AwsXrayRemoteSampler.newBuilder(Resource.empty()).build()) { ++ sampler.setSpanExporter(mock(SpanExporter.class)); ++ assertThatCode(() -> sampler.adaptSampling(mock(ReadableSpan.class), mock(SpanData.class))) ++ .doesNotThrowAnyException(); ++ } ++ } ++ + // https://github.com/open-telemetry/opentelemetry-java-contrib/issues/376 + @Test + void testJitterTruncation() { +@@ -206,6 +237,16 @@ class 
AwsXrayRemoteSamplerTest { + } + } + ++ @Test ++ void setAdaptiveSamplingConfig() { ++ try (AwsXrayRemoteSampler sampler = AwsXrayRemoteSampler.newBuilder(Resource.empty()).build()) { ++ AwsXrayAdaptiveSamplingConfig config = ++ AwsXrayAdaptiveSamplingConfig.builder().setVersion(1.0).build(); ++ sampler.setAdaptiveSamplingConfig(config); ++ assertThrows(IllegalStateException.class, () -> sampler.setAdaptiveSamplingConfig(config)); ++ } ++ } ++ + private static SamplingDecision doSample(Sampler sampler, String name) { + return sampler + .shouldSample( +diff --git a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplierTest.java b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplierTest.java +index 920a5ffd..dcc7118a 100644 +--- a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplierTest.java ++++ b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplierTest.java +@@ -15,18 +15,25 @@ import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_ + import static io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_HOST_NAME; + import static org.assertj.core.api.Assertions.assertThat; + import static org.awaitility.Awaitility.await; ++import static org.mockito.Mockito.mock; ++import static org.mockito.Mockito.when; + + import com.fasterxml.jackson.databind.ObjectMapper; + import io.opentelemetry.api.common.AttributeKey; + import io.opentelemetry.api.common.Attributes; + import io.opentelemetry.api.common.AttributesBuilder; ++import io.opentelemetry.api.trace.SpanContext; + import io.opentelemetry.api.trace.SpanKind; ++import io.opentelemetry.api.trace.TraceFlags; + import io.opentelemetry.api.trace.TraceId; ++import io.opentelemetry.api.trace.TraceState; + import io.opentelemetry.context.Context; ++import io.opentelemetry.contrib.awsxray.GetSamplingTargetsResponse.SamplingBoost; + import 
io.opentelemetry.contrib.awsxray.GetSamplingTargetsResponse.SamplingTargetDocument; + import io.opentelemetry.sdk.common.Clock; + import io.opentelemetry.sdk.resources.Resource; + import io.opentelemetry.sdk.testing.time.TestClock; ++import io.opentelemetry.sdk.trace.ReadableSpan; + import io.opentelemetry.sdk.trace.samplers.SamplingDecision; + import io.opentelemetry.sdk.trace.samplers.SamplingResult; + import io.opentelemetry.semconv.HttpAttributes; +@@ -37,6 +44,7 @@ import java.io.IOException; + import java.io.UncheckedIOException; + import java.time.Duration; + import java.time.Instant; ++import java.time.temporal.ChronoUnit; + import java.util.Collections; + import java.util.Date; + import java.util.concurrent.TimeUnit; +@@ -50,6 +58,7 @@ class SamplingRuleApplierTest { + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + private static final String CLIENT_ID = "test-client-id"; ++ private static final String TEST_SERVICE_NAME = "test-service-name"; + + @Nested + @SuppressWarnings("ClassCanBeStatic") +@@ -57,7 +66,10 @@ class SamplingRuleApplierTest { + + private final SamplingRuleApplier applier = + new SamplingRuleApplier( +- CLIENT_ID, readSamplingRule("/sampling-rule-exactmatch.json"), Clock.getDefault()); ++ CLIENT_ID, ++ readSamplingRule("/sampling-rule-exactmatch.json"), ++ TEST_SERVICE_NAME, ++ Clock.getDefault()); + + private final Resource resource = + Resource.builder() +@@ -91,7 +103,8 @@ class SamplingRuleApplierTest { + .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); + + Date now = new Date(); +- GetSamplingTargetsRequest.SamplingStatisticsDocument statistics = applier.snapshot(now); ++ GetSamplingTargetsRequest.SamplingStatisticsDocument statistics = ++ applier.snapshot(now).getStatisticsDocument(); + assertThat(statistics.getClientId()).isEqualTo(CLIENT_ID); + assertThat(statistics.getRuleName()).isEqualTo("Test"); + assertThat(statistics.getTimestamp()).isEqualTo(now); +@@ -100,7 +113,7 @@ class 
SamplingRuleApplierTest { + assertThat(statistics.getBorrowCount()).isEqualTo(0); + + // Reset +- statistics = applier.snapshot(now); ++ statistics = applier.snapshot(now).getStatisticsDocument(); + assertThat(statistics.getRequestCount()).isEqualTo(0); + assertThat(statistics.getSampledCount()).isEqualTo(0); + assertThat(statistics.getBorrowCount()).isEqualTo(0); +@@ -108,7 +121,7 @@ class SamplingRuleApplierTest { + doSample(applier); + doSample(applier); + now = new Date(); +- statistics = applier.snapshot(now); ++ statistics = applier.snapshot(now).getStatisticsDocument(); + assertThat(statistics.getClientId()).isEqualTo(CLIENT_ID); + assertThat(statistics.getRuleName()).isEqualTo("Test"); + assertThat(statistics.getTimestamp()).isEqualTo(now); +@@ -283,7 +296,10 @@ class SamplingRuleApplierTest { + + private final SamplingRuleApplier applier = + new SamplingRuleApplier( +- CLIENT_ID, readSamplingRule("/sampling-rule-wildcards.json"), Clock.getDefault()); ++ CLIENT_ID, ++ readSamplingRule("/sampling-rule-wildcards.json"), ++ TEST_SERVICE_NAME, ++ Clock.getDefault()); + + private final Resource resource = + Resource.builder() +@@ -316,7 +332,8 @@ class SamplingRuleApplierTest { + assertThat(doSample(applier)).isEqualTo(SamplingResult.create(SamplingDecision.DROP)); + + Date now = new Date(); +- GetSamplingTargetsRequest.SamplingStatisticsDocument statistics = applier.snapshot(now); ++ GetSamplingTargetsRequest.SamplingStatisticsDocument statistics = ++ applier.snapshot(now).getStatisticsDocument(); + assertThat(statistics.getClientId()).isEqualTo(CLIENT_ID); + assertThat(statistics.getRuleName()).isEqualTo("Test"); + assertThat(statistics.getTimestamp()).isEqualTo(now); +@@ -325,7 +342,7 @@ class SamplingRuleApplierTest { + assertThat(statistics.getBorrowCount()).isEqualTo(0); + + // Reset +- statistics = applier.snapshot(now); ++ statistics = applier.snapshot(now).getStatisticsDocument(); + assertThat(statistics.getRequestCount()).isEqualTo(0); + 
assertThat(statistics.getSampledCount()).isEqualTo(0); + assertThat(statistics.getBorrowCount()).isEqualTo(0); +@@ -333,7 +350,7 @@ class SamplingRuleApplierTest { + doSample(applier); + doSample(applier); + now = new Date(); +- statistics = applier.snapshot(now); ++ statistics = applier.snapshot(now).getStatisticsDocument(); + assertThat(statistics.getClientId()).isEqualTo(CLIENT_ID); + assertThat(statistics.getRuleName()).isEqualTo("Test"); + assertThat(statistics.getTimestamp()).isEqualTo(now); +@@ -626,7 +643,10 @@ class SamplingRuleApplierTest { + + private final SamplingRuleApplier applier = + new SamplingRuleApplier( +- CLIENT_ID, readSamplingRule("/sampling-rule-awslambda.json"), Clock.getDefault()); ++ CLIENT_ID, ++ readSamplingRule("/sampling-rule-awslambda.json"), ++ TEST_SERVICE_NAME, ++ Clock.getDefault()); + + private final Resource resource = + Resource.builder() +@@ -677,7 +697,10 @@ class SamplingRuleApplierTest { + void borrowing() { + SamplingRuleApplier applier = + new SamplingRuleApplier( +- CLIENT_ID, readSamplingRule("/sampling-rule-reservoir.json"), Clock.getDefault()); ++ CLIENT_ID, ++ readSamplingRule("/sampling-rule-reservoir.json"), ++ TEST_SERVICE_NAME, ++ Clock.getDefault()); + + // Borrow + assertThat(doSample(applier)) +@@ -688,7 +711,8 @@ class SamplingRuleApplierTest { + assertThat(doSample(applier)).isEqualTo(SamplingResult.create(SamplingDecision.DROP)); + + Date now = new Date(); +- GetSamplingTargetsRequest.SamplingStatisticsDocument statistics = applier.snapshot(now); ++ GetSamplingTargetsRequest.SamplingStatisticsDocument statistics = ++ applier.snapshot(now).getStatisticsDocument(); + assertThat(statistics.getClientId()).isEqualTo(CLIENT_ID); + assertThat(statistics.getRuleName()).isEqualTo("Test"); + assertThat(statistics.getTimestamp()).isEqualTo(now); +@@ -697,7 +721,7 @@ class SamplingRuleApplierTest { + assertThat(statistics.getBorrowCount()).isEqualTo(1); + + // Reset +- statistics = applier.snapshot(now); ++ 
statistics = applier.snapshot(now).getStatisticsDocument(); + assertThat(statistics.getRequestCount()).isEqualTo(0); + assertThat(statistics.getSampledCount()).isEqualTo(0); + assertThat(statistics.getBorrowCount()).isEqualTo(0); +@@ -713,7 +737,7 @@ class SamplingRuleApplierTest { + }); + + now = new Date(); +- statistics = applier.snapshot(now); ++ statistics = applier.snapshot(now).getStatisticsDocument(); + assertThat(statistics.getClientId()).isEqualTo(CLIENT_ID); + assertThat(statistics.getRuleName()).isEqualTo("Test"); + assertThat(statistics.getTimestamp()).isEqualTo(now); +@@ -727,7 +751,7 @@ class SamplingRuleApplierTest { + TestClock clock = TestClock.create(); + SamplingRuleApplier applier = + new SamplingRuleApplier( +- CLIENT_ID, readSamplingRule("/sampling-rule-reservoir.json"), clock); ++ CLIENT_ID, readSamplingRule("/sampling-rule-reservoir.json"), TEST_SERVICE_NAME, clock); + // No target yet, borrows from reservoir every second. + assertThat(doSample(applier)) + .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); +@@ -746,8 +770,8 @@ class SamplingRuleApplierTest { + + // Got a target! + SamplingTargetDocument target = +- SamplingTargetDocument.create(0.0, 5, 2, Date.from(now.plusSeconds(10)), "test"); +- applier = applier.withTarget(target, Date.from(now)); ++ SamplingTargetDocument.create(0.0, 5, 2, Date.from(now.plusSeconds(10)), null, "test"); ++ applier = applier.withTarget(target, Date.from(now), clock.nanoTime()); + // Statistics not expired yet + assertThat(applier.snapshot(Date.from(now))).isNull(); + +@@ -786,7 +810,7 @@ class SamplingRuleApplierTest { + TestClock clock = TestClock.create(); + SamplingRuleApplier applier = + new SamplingRuleApplier( +- CLIENT_ID, readSamplingRule("/sampling-rule-reservoir.json"), clock); ++ CLIENT_ID, readSamplingRule("/sampling-rule-reservoir.json"), TEST_SERVICE_NAME, clock); + // No target yet, borrows from reservoir every second. 
+ assertThat(doSample(applier)) + .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); +@@ -804,8 +828,8 @@ class SamplingRuleApplierTest { + assertThat(applier.snapshot(Date.from(now.plus(Duration.ofMinutes(30))))).isNotNull(); + + // Got a target! +- SamplingTargetDocument target = SamplingTargetDocument.create(0.0, 5, null, null, "test"); +- applier = applier.withTarget(target, Date.from(now)); ++ SamplingTargetDocument target = SamplingTargetDocument.create(0.0, 5, null, null, null, "test"); ++ applier = applier.withTarget(target, Date.from(now), clock.nanoTime()); + // No reservoir, always use fixed rate (drop) + assertThat(doSample(applier)).isEqualTo(SamplingResult.create(SamplingDecision.DROP)); + assertThat(doSample(applier)).isEqualTo(SamplingResult.create(SamplingDecision.DROP)); +@@ -815,12 +839,105 @@ class SamplingRuleApplierTest { + assertThat(applier.snapshot(Date.from(now))).isNotNull(); + } + ++ @Test ++ void ruleWithBoost() { ++ TestClock clock = TestClock.create(); ++ SamplingRuleApplier applier = ++ new SamplingRuleApplier( ++ CLIENT_ID, readSamplingRule("/sampling-rule-boost.json"), TEST_SERVICE_NAME, clock); ++ // No reservoir, always use fixed rate (drop) ++ assertThat(doSample(applier)).isEqualTo(SamplingResult.create(SamplingDecision.DROP)); ++ assertThat(doSample(applier)).isEqualTo(SamplingResult.create(SamplingDecision.DROP)); ++ ++ Instant now = Instant.ofEpochSecond(0, clock.now()); ++ ++ // Got a target! 
++ // Boost raises sampling rate to 100% for 20 seconds ++ SamplingTargetDocument target = ++ SamplingTargetDocument.create( ++ 0.0, ++ 5, ++ null, ++ null, ++ SamplingBoost.create(1.0, Date.from(now.plus(20, ChronoUnit.SECONDS))), ++ "test"); ++ applier = applier.withTarget(target, Date.from(now), clock.nanoTime()); ++ ++ // We should start sampling at this point ++ assertThat(doSample(applier)) ++ .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); ++ assertThat(doSample(applier)) ++ .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); ++ // After waiting 10 seconds, we should still be sampling ++ clock.advance(Duration.ofSeconds(10)); ++ assertThat(doSample(applier)) ++ .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); ++ assertThat(doSample(applier)) ++ .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); ++ // After 30 seconds, we should stop sampling ++ clock.advance(Duration.ofSeconds(20)); ++ assertThat(doSample(applier)).isEqualTo(SamplingResult.create(SamplingDecision.DROP)); ++ assertThat(doSample(applier)).isEqualTo(SamplingResult.create(SamplingDecision.DROP)); ++ } ++ ++ @Test ++ void countTrace() { ++ TestClock clock = TestClock.create(); ++ SamplingRuleApplier applier = ++ new SamplingRuleApplier( ++ CLIENT_ID, readSamplingRule("/sampling-rule-boost.json"), TEST_SERVICE_NAME, clock); ++ ++ Instant now = Instant.ofEpochSecond(0, clock.now()); ++ ++ SamplingRuleApplier.SamplingRuleStatisticsSnapshot snapshot = applier.snapshot(Date.from(now)); ++ assertThat(snapshot.getBoostStatisticsDocument().getTotalCount()).isEqualTo(0); ++ ++ applier.countTrace(); ++ applier.countTrace(); ++ applier.countTrace(); ++ ++ snapshot = applier.snapshot(Date.from(now)); ++ assertThat(snapshot.getBoostStatisticsDocument().getTotalCount()).isEqualTo(3); ++ assertThat(snapshot.getBoostStatisticsDocument().getAnomalyCount()).isEqualTo(0); ++ ++ // Snapshotting again should've reset the statistics ++ 
snapshot = applier.snapshot(Date.from(now)); ++ assertThat(snapshot.getBoostStatisticsDocument().getTotalCount()).isEqualTo(0); ++ assertThat(snapshot.getBoostStatisticsDocument().getAnomalyCount()).isEqualTo(0); ++ ++ // Decision to separate by trace ID is made in XrayRulesSampler class, so we can ignore ++ // trace/span ID in span context here ++ ReadableSpan readableSpanMock = mock(ReadableSpan.class); ++ // Mock sampling the first two traces ++ when(readableSpanMock.getSpanContext()) ++ .thenReturn( ++ SpanContext.create( ++ "TRACE_ID", "SPAN_ID", TraceFlags.getSampled(), TraceState.getDefault())); ++ applier.countTrace(); ++ applier.countAnomalyTrace(readableSpanMock); ++ applier.countTrace(); ++ applier.countAnomalyTrace(readableSpanMock); ++ ++ // Mock not sampling the last trace ++ when(readableSpanMock.getSpanContext()) ++ .thenReturn( ++ SpanContext.create( ++ "TRACE_ID", "SPAN_ID", TraceFlags.getDefault(), TraceState.getDefault())); ++ applier.countTrace(); ++ applier.countAnomalyTrace(readableSpanMock); ++ ++ snapshot = applier.snapshot(Date.from(now)); ++ assertThat(snapshot.getBoostStatisticsDocument().getTotalCount()).isEqualTo(3); ++ assertThat(snapshot.getBoostStatisticsDocument().getAnomalyCount()).isEqualTo(3); ++ assertThat(snapshot.getBoostStatisticsDocument().getSampledAnomalyCount()).isEqualTo(2); ++ } ++ + @Test + void withNextSnapshotTime() { + TestClock clock = TestClock.create(); + SamplingRuleApplier applier = + new SamplingRuleApplier( +- CLIENT_ID, readSamplingRule("/sampling-rule-reservoir.json"), clock); ++ CLIENT_ID, readSamplingRule("/sampling-rule-reservoir.json"), TEST_SERVICE_NAME, clock); + + Instant now = Instant.ofEpochSecond(0, clock.now()); + assertThat(applier.snapshot(Date.from(now))).isNotNull(); +@@ -839,6 +956,71 @@ class SamplingRuleApplierTest { + assertThat(doSample(applier)).isEqualTo(SamplingResult.create(SamplingDecision.DROP)); + } + ++ @Test ++ void hasBoostMethod() { ++ SamplingRuleApplier applierWithBoost = 
++ new SamplingRuleApplier( ++ CLIENT_ID, ++ readSamplingRule("/sampling-rule-boost.json"), ++ TEST_SERVICE_NAME, ++ Clock.getDefault()); ++ assertThat(applierWithBoost.hasBoost()).isTrue(); ++ ++ SamplingRuleApplier applierWithoutBoost = ++ new SamplingRuleApplier( ++ CLIENT_ID, ++ readSamplingRule("/sampling-rule-exactmatch.json"), ++ TEST_SERVICE_NAME, ++ Clock.getDefault()); ++ assertThat(applierWithoutBoost.hasBoost()).isFalse(); ++ } ++ ++ @Test ++ void getServiceNameMethod() { ++ SamplingRuleApplier applier = ++ new SamplingRuleApplier( ++ CLIENT_ID, ++ readSamplingRule("/sampling-rule-exactmatch.json"), ++ TEST_SERVICE_NAME, ++ Clock.getDefault()); ++ assertThat(applier.getServiceName()).isEqualTo(TEST_SERVICE_NAME); ++ } ++ ++ @Test ++ void nullRuleName() { ++ GetSamplingRulesResponse.SamplingRule ruleWithNullName = ++ GetSamplingRulesResponse.SamplingRule.create( ++ Collections.emptyMap(), ++ 1.0, ++ "*", ++ "*", ++ 1, ++ 0, ++ "*", ++ null, // null rule name ++ null, ++ "*", ++ "*", ++ "*", ++ 1, ++ null); ++ ++ SamplingRuleApplier applier = ++ new SamplingRuleApplier(CLIENT_ID, ruleWithNullName, TEST_SERVICE_NAME, Clock.getDefault()); ++ assertThat(applier.getRuleName()).isEqualTo("default"); ++ } ++ ++ @Test ++ void nullServiceName() { ++ SamplingRuleApplier applier = ++ new SamplingRuleApplier( ++ CLIENT_ID, ++ readSamplingRule("/sampling-rule-exactmatch.json"), ++ null, // null service name ++ Clock.getDefault()); ++ assertThat(applier.getServiceName()).isEqualTo("default"); ++ } ++ + private static SamplingResult doSample(SamplingRuleApplier applier) { + return applier.shouldSample( + Context.current(), +diff --git a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/XrayRulesSamplerTest.java b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/XrayRulesSamplerTest.java +index 1ca8df34..72ec524b 100644 +--- a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/XrayRulesSamplerTest.java ++++ 
b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/XrayRulesSamplerTest.java +@@ -5,17 +5,28 @@ + + package io.opentelemetry.contrib.awsxray; + ++import static io.opentelemetry.semconv.HttpAttributes.HTTP_RESPONSE_STATUS_CODE; + import static org.assertj.core.api.Assertions.assertThat; ++import static org.junit.jupiter.api.Assertions.assertThrows; ++import static org.mockito.ArgumentMatchers.any; ++import static org.mockito.Mockito.mock; ++import static org.mockito.Mockito.when; + + import io.opentelemetry.api.common.AttributeKey; + import io.opentelemetry.api.common.Attributes; ++import io.opentelemetry.api.trace.SpanContext; + import io.opentelemetry.api.trace.SpanKind; ++import io.opentelemetry.api.trace.TraceFlags; + import io.opentelemetry.api.trace.TraceId; ++import io.opentelemetry.api.trace.TraceState; + import io.opentelemetry.context.Context; ++import io.opentelemetry.contrib.awsxray.GetSamplingRulesResponse.SamplingRateBoost; + import io.opentelemetry.contrib.awsxray.GetSamplingRulesResponse.SamplingRule; + import io.opentelemetry.contrib.awsxray.GetSamplingTargetsResponse.SamplingTargetDocument; + import io.opentelemetry.sdk.resources.Resource; + import io.opentelemetry.sdk.testing.time.TestClock; ++import io.opentelemetry.sdk.trace.ReadableSpan; ++import io.opentelemetry.sdk.trace.data.SpanData; + import io.opentelemetry.sdk.trace.samplers.Sampler; + import io.opentelemetry.sdk.trace.samplers.SamplingDecision; + import io.opentelemetry.sdk.trace.samplers.SamplingResult; +@@ -25,14 +36,20 @@ import java.util.Arrays; + import java.util.Collections; + import java.util.Date; + import java.util.HashMap; ++import java.util.List; + import java.util.Map; + import java.util.concurrent.TimeUnit; ++import java.util.concurrent.atomic.LongAdder; ++import java.util.function.Consumer; + import java.util.stream.Collectors; + import java.util.stream.Stream; + import org.junit.jupiter.api.Test; + + class XrayRulesSamplerTest { + ++ private static final 
AttributeKey URL_PATH = AttributeKey.stringKey("url.path"); ++ private static final AttributeKey HTTP_METHOD = AttributeKey.stringKey("http.method"); ++ + @Test + void updateTargets() { + SamplingRule rule1 = +@@ -49,7 +66,8 @@ class XrayRulesSamplerTest { + "*", + "*", + "*", +- 1); ++ 1, ++ null); + SamplingRule rule2 = + SamplingRule.create( + Collections.singletonMap("test", "dog-service"), +@@ -64,7 +82,8 @@ class XrayRulesSamplerTest { + "*", + "*", + "*", +- 1); ++ 1, ++ null); + SamplingRule rule3 = + SamplingRule.create( + Collections.singletonMap("test", "*-service"), +@@ -79,7 +98,8 @@ class XrayRulesSamplerTest { + "*", + "*", + "*", +- 1); ++ 1, ++ null); + SamplingRule rule4 = + SamplingRule.create( + Collections.emptyMap(), +@@ -94,7 +114,8 @@ class XrayRulesSamplerTest { + "*", + "*", + "*", +- 1); ++ 1, ++ null); + + TestClock clock = TestClock.create(); + XrayRulesSampler sampler = +@@ -103,22 +124,58 @@ class XrayRulesSamplerTest { + Resource.getDefault(), + clock, + Sampler.alwaysOn(), +- Arrays.asList(rule1, rule4, rule3, rule2)); ++ Arrays.asList(rule1, rule4, rule3, rule2), ++ null); + + assertThat(doSample(sampler, "cat-service")) +- .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.RECORD_AND_SAMPLE, ++ Attributes.empty(), ++ XrayRulesSampler.hashRuleName("cat-rule"))); + assertThat(doSample(sampler, "cat-service")) +- .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.RECORD_AND_SAMPLE, ++ Attributes.empty(), ++ XrayRulesSampler.hashRuleName("cat-rule"))); + assertThat(doSample(sampler, "dog-service")) +- .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.RECORD_AND_SAMPLE, ++ Attributes.empty(), ++ 
XrayRulesSampler.hashRuleName("dog-rule"))); + assertThat(doSample(sampler, "dog-service")) +- .isEqualTo(SamplingResult.create(SamplingDecision.DROP)); ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.DROP, ++ Attributes.empty(), ++ XrayRulesSampler.hashRuleName("dog-rule"))); + assertThat(doSample(sampler, "bat-service")) +- .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.RECORD_AND_SAMPLE, ++ Attributes.empty(), ++ XrayRulesSampler.hashRuleName("bat-rule"))); + assertThat(doSample(sampler, "bat-service")) +- .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.RECORD_AND_SAMPLE, ++ Attributes.empty(), ++ XrayRulesSampler.hashRuleName("bat-rule"))); + assertThat(doSample(sampler, "unknown")) +- .isEqualTo(SamplingResult.create(SamplingDecision.DROP)); ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.DROP, ++ Attributes.empty(), ++ XrayRulesSampler.hashRuleName("default-rule"))); + + Instant now = Instant.ofEpochSecond(0, clock.now()); + assertThat(sampler.snapshot(Date.from(now))).hasSize(4); +@@ -128,10 +185,10 @@ class XrayRulesSamplerTest { + assertThat(sampler.snapshot(Date.from(now))).hasSize(4); + + SamplingTargetDocument catTarget = +- SamplingTargetDocument.create(0.0, 10, null, null, "cat-rule"); ++ SamplingTargetDocument.create(0.0, 10, null, null, null, "cat-rule"); + + SamplingTargetDocument batTarget = +- SamplingTargetDocument.create(0.0, 5, null, null, "bat-rule"); ++ SamplingTargetDocument.create(0.0, 5, null, null, null, "bat-rule"); + + clock.advance(Duration.ofSeconds(10)); + now = Instant.ofEpochSecond(0, clock.now()); +@@ -145,16 +202,41 @@ class XrayRulesSamplerTest { + .collect(Collectors.toSet()), + Date.from(now)); + 
assertThat(doSample(sampler, "dog-service")) +- .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.RECORD_AND_SAMPLE, ++ Attributes.empty(), ++ XrayRulesSampler.hashRuleName("dog-rule"))); + assertThat(doSample(sampler, "dog-service")) +- .isEqualTo(SamplingResult.create(SamplingDecision.DROP)); ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.DROP, ++ Attributes.empty(), ++ XrayRulesSampler.hashRuleName("dog-rule"))); + assertThat(doSample(sampler, "unknown")) +- .isEqualTo(SamplingResult.create(SamplingDecision.DROP)); ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.DROP, ++ Attributes.empty(), ++ XrayRulesSampler.hashRuleName("default-rule"))); + // Targets overridden to always drop. + assertThat(doSample(sampler, "cat-service")) +- .isEqualTo(SamplingResult.create(SamplingDecision.DROP)); ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.DROP, ++ Attributes.empty(), ++ XrayRulesSampler.hashRuleName("cat-rule"))); + assertThat(doSample(sampler, "bat-service")) +- .isEqualTo(SamplingResult.create(SamplingDecision.DROP)); ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.DROP, ++ Attributes.empty(), ++ XrayRulesSampler.hashRuleName("bat-rule"))); + + // Minimum is batTarget, 5s from now + assertThat(sampler.nextTargetFetchTimeNanos()) +@@ -169,6 +251,867 @@ class XrayRulesSamplerTest { + assertThat(sampler.snapshot(Date.from(now))).hasSize(4); + } + ++ @Test ++ void updateTargetsWithLocalAdaptiveSamplingConfig() { ++ SamplingRule rule1 = ++ SamplingRule.create( ++ Collections.singletonMap("test", "cat-service"), ++ 1.0, ++ "*", ++ "*", ++ 1, ++ 1, ++ "*", ++ "*", ++ "cat-rule", ++ "*", ++ "*", ++ "*", ++ 1, ++ null); ++ SamplingRule rule2 = ++ SamplingRule.create( ++ 
Collections.singletonMap("test", "dog-service"), ++ 0.0, ++ "*", ++ "*", ++ 2, ++ 1, ++ "*", ++ "*", ++ "dog-rule", ++ "*", ++ "*", ++ "*", ++ 1, ++ null); ++ SamplingRule rule3 = ++ SamplingRule.create( ++ Collections.singletonMap("test", "*-service"), ++ 1.0, ++ "*", ++ "*", ++ 3, ++ 1, ++ "*", ++ "*", ++ "bat-rule", ++ "*", ++ "*", ++ "*", ++ 1, ++ null); ++ SamplingRule rule4 = ++ SamplingRule.create( ++ Collections.emptyMap(), ++ 0.0, ++ "*", ++ "*", ++ 4, ++ 0, ++ "*", ++ "*", ++ "default-rule", ++ "*", ++ "*", ++ "*", ++ 1, ++ null); ++ AwsXrayAdaptiveSamplingConfig config = ++ AwsXrayAdaptiveSamplingConfig.builder() ++ .setVersion(1.0) ++ .setAnomalyCaptureLimit( ++ AwsXrayAdaptiveSamplingConfig.AnomalyCaptureLimit.builder() ++ .setAnomalyTracesPerSecond(2) ++ .build()) ++ .build(); ++ ++ TestClock clock = TestClock.create(); ++ XrayRulesSampler sampler = ++ new XrayRulesSampler( ++ "CLIENT_ID", ++ Resource.getDefault(), ++ clock, ++ Sampler.alwaysOn(), ++ Arrays.asList(rule1, rule4, rule3, rule2), ++ config); ++ ++ assertThat(doSample(sampler, "cat-service")) ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.RECORD_AND_SAMPLE, ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "cat-rule") ++ .build(), ++ XrayRulesSampler.hashRuleName("cat-rule"))); ++ assertThat(doSample(sampler, "cat-service")) ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.RECORD_AND_SAMPLE, ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "cat-rule") ++ .build(), ++ XrayRulesSampler.hashRuleName("cat-rule"))); ++ assertThat(doSample(sampler, "dog-service")) ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.RECORD_AND_SAMPLE, ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "dog-rule") ++ .build(), ++ XrayRulesSampler.hashRuleName("dog-rule"))); ++ assertThat(doSample(sampler, 
"dog-service")) ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.DROP, ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "dog-rule") ++ .build(), ++ XrayRulesSampler.hashRuleName("dog-rule"))); ++ assertThat(doSample(sampler, "bat-service")) ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.RECORD_AND_SAMPLE, ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "bat-rule") ++ .build(), ++ XrayRulesSampler.hashRuleName("bat-rule"))); ++ assertThat(doSample(sampler, "bat-service")) ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.RECORD_AND_SAMPLE, ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "bat-rule") ++ .build(), ++ XrayRulesSampler.hashRuleName("bat-rule"))); ++ assertThat(doSample(sampler, "unknown")) ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.DROP, ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "default-rule") ++ .build(), ++ XrayRulesSampler.hashRuleName("default-rule"))); ++ ++ Instant now = Instant.ofEpochSecond(0, clock.now()); ++ assertThat(sampler.snapshot(Date.from(now))).hasSize(4); ++ assertThat(sampler.nextTargetFetchTimeNanos()).isEqualTo(clock.nanoTime()); ++ clock.advance(Duration.ofSeconds(10)); ++ now = Instant.ofEpochSecond(0, clock.now()); ++ assertThat(sampler.snapshot(Date.from(now))).hasSize(4); ++ ++ SamplingTargetDocument catTarget = ++ SamplingTargetDocument.create(0.0, 10, null, null, null, "cat-rule"); ++ ++ SamplingTargetDocument batTarget = ++ SamplingTargetDocument.create(0.0, 5, null, null, null, "bat-rule"); ++ ++ clock.advance(Duration.ofSeconds(10)); ++ now = Instant.ofEpochSecond(0, clock.now()); ++ Map targets = new HashMap<>(); ++ targets.put("cat-rule", catTarget); ++ targets.put("bat-rule", batTarget); ++ sampler = ++ sampler.withTargets( ++ 
targets, ++ Stream.of("cat-rule", "bat-rule", "dog-rule", "default-rule") ++ .collect(Collectors.toSet()), ++ Date.from(now)); ++ assertThat(doSample(sampler, "dog-service")) ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.RECORD_AND_SAMPLE, ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "dog-rule") ++ .build(), ++ XrayRulesSampler.hashRuleName("dog-rule"))); ++ assertThat(doSample(sampler, "dog-service")) ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.DROP, ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "dog-rule") ++ .build(), ++ XrayRulesSampler.hashRuleName("dog-rule"))); ++ assertThat(doSample(sampler, "unknown")) ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.DROP, ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "default-rule") ++ .build(), ++ XrayRulesSampler.hashRuleName("default-rule"))); ++ // Targets overridden to always drop. 
++ assertThat(doSample(sampler, "cat-service")) ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.DROP, ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "cat-rule") ++ .build(), ++ XrayRulesSampler.hashRuleName("cat-rule"))); ++ assertThat(doSample(sampler, "bat-service")) ++ .usingRecursiveComparison() ++ .isEqualTo( ++ AwsSamplingResult.create( ++ SamplingDecision.DROP, ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "bat-rule") ++ .build(), ++ XrayRulesSampler.hashRuleName("bat-rule"))); ++ ++ // Minimum is batTarget, 5s from now ++ assertThat(sampler.nextTargetFetchTimeNanos()) ++ .isEqualTo(clock.nanoTime() + TimeUnit.SECONDS.toNanos(5)); ++ ++ assertThat(sampler.snapshot(Date.from(now))).isEmpty(); ++ clock.advance(Duration.ofSeconds(5)); ++ now = Instant.ofEpochSecond(0, clock.now()); ++ assertThat(sampler.snapshot(Date.from(now))).hasSize(1); ++ clock.advance(Duration.ofSeconds(5)); ++ now = Instant.ofEpochSecond(0, clock.now()); ++ assertThat(sampler.snapshot(Date.from(now))).hasSize(4); ++ } ++ ++ @Test ++ void noAdaptiveSamplingUsesNoSpace() { ++ SamplingRule rule1 = ++ SamplingRule.create( ++ Collections.singletonMap("test", "cat-service"), ++ 1.0, ++ "*", ++ "*", ++ 1, ++ 1, ++ "*", ++ "*", ++ "cat-rule", ++ "*", ++ "*", ++ "*", ++ 1, ++ null); ++ ++ TestClock clock = TestClock.create(); ++ XrayRulesSampler sampler = ++ new XrayRulesSampler( ++ "CLIENT_ID", ++ Resource.getDefault(), ++ clock, ++ Sampler.alwaysOn(), ++ Arrays.asList(rule1), ++ null); ++ ++ LongAdder exportCounter = new LongAdder(); ++ ReadableSpan readableSpanMock = mock(ReadableSpan.class); ++ SpanData spanDataMock = mock(SpanData.class); ++ Consumer stubbedConsumer = x -> exportCounter.increment(); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ sampler.adaptSampling(readableSpanMock, 
spanDataMock, stubbedConsumer); ++ assertThat(sampler.getTraceUsageCache().asMap().size()).isEqualTo(0); ++ } ++ ++ @Test ++ void recordErrors() { ++ SamplingRule rule1 = ++ SamplingRule.create( ++ Collections.singletonMap("test", "cat-service"), ++ 1.0, ++ "*", ++ "*", ++ 1, ++ 1, ++ "*", ++ "*", ++ "cat-rule", ++ "*", ++ "*", ++ "*", ++ 1, ++ null); ++ SamplingRule rule2 = ++ SamplingRule.create( ++ Collections.emptyMap(), ++ 0.0, ++ "*", ++ "*", ++ 4, ++ 0, ++ "*", ++ "*", ++ "default-rule", ++ "*", ++ "*", ++ "*", ++ 1, ++ SamplingRateBoost.create(1, 300)); ++ AwsXrayAdaptiveSamplingConfig config = ++ AwsXrayAdaptiveSamplingConfig.builder() ++ .setVersion(1.0) ++ .setAnomalyCaptureLimit( ++ AwsXrayAdaptiveSamplingConfig.AnomalyCaptureLimit.builder() ++ .setAnomalyTracesPerSecond(2) ++ .build()) ++ .setAnomalyConditions( ++ Arrays.asList( ++ AwsXrayAdaptiveSamplingConfig.AnomalyConditions.builder() ++ .setErrorCodeRegex("^500$") ++ .setUsage(AwsXrayAdaptiveSamplingConfig.UsageType.BOTH) ++ .build())) ++ .build(); ++ ++ TestClock clock = TestClock.create(); ++ XrayRulesSampler sampler = ++ new XrayRulesSampler( ++ "CLIENT_ID", ++ Resource.getDefault(), ++ clock, ++ Sampler.alwaysOn(), ++ Arrays.asList(rule1, rule2), ++ config); ++ ++ Instant now = Instant.ofEpochSecond(0, clock.now()); ++ ++ ReadableSpan readableSpanMock = mock(ReadableSpan.class); ++ when(readableSpanMock.getSpanContext()) ++ .thenReturn( ++ SpanContext.create( ++ "TRACE_ID", "SPAN_ID", TraceFlags.getDefault(), TraceState.getDefault())); ++ SpanData spanDataMock = mock(SpanData.class); ++ Attributes attributesMock = mock(Attributes.class); ++ when(spanDataMock.getAttributes()).thenReturn(attributesMock); ++ when(attributesMock.get(HTTP_RESPONSE_STATUS_CODE)).thenReturn(500L); ++ LongAdder exportCounter = new LongAdder(); ++ Consumer stubbedConsumer = x -> exportCounter.increment(); ++ ++ // First span should be captured, second should be rate limited ++ 
when(spanDataMock.getTraceId()).thenReturn("TRACE_ID1"); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID2"); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID3"); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ // Only first span captured due to rate limiting ++ assertThat(exportCounter.sumThenReset()).isEqualTo(2L); ++ ++ List snapshot = ++ sampler.snapshot(Date.from(now)); ++ ++ // Rules are ordered by priority, so cat-rule is first ++ assertThat(snapshot.get(0).getBoostStatisticsDocument().getTotalCount()).isEqualTo(0); ++ assertThat(snapshot.get(0).getBoostStatisticsDocument().getAnomalyCount()).isEqualTo(0); ++ ++ assertThat(snapshot.get(0).getBoostStatisticsDocument().getSampledAnomalyCount()).isEqualTo(0); ++ assertThat(snapshot.get(1).getBoostStatisticsDocument().getTotalCount()).isEqualTo(3); ++ assertThat(snapshot.get(1).getBoostStatisticsDocument().getAnomalyCount()).isEqualTo(3); ++ ++ assertThat(snapshot.get(1).getBoostStatisticsDocument().getSampledAnomalyCount()).isEqualTo(0); ++ ++ // Mock trace coming from upstream service where it was sampled by cat-rule ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID4"); ++ when(readableSpanMock.getSpanContext()) ++ .thenReturn( ++ SpanContext.create( ++ "TRACE_ID4", ++ "SPAN_ID", ++ TraceFlags.getDefault(), ++ TraceState.builder() ++ .put( ++ AwsSamplingResult.AWS_XRAY_SAMPLING_RULE_TRACE_STATE_KEY, ++ XrayRulesSampler.hashRuleName("cat-rule")) ++ .build())); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ ++ // Ensure snapshot shows correctly saved statistics ++ snapshot = sampler.snapshot(Date.from(now)); ++ // cat-rule has no boost config and therefore records no statistics ++ assertThat(snapshot.get(0).getBoostStatisticsDocument().getTotalCount()).isEqualTo(0); ++ 
assertThat(snapshot.get(0).getBoostStatisticsDocument().getAnomalyCount()).isEqualTo(0); ++ assertThat(snapshot.get(0).getBoostStatisticsDocument().getSampledAnomalyCount()).isEqualTo(0); ++ assertThat(snapshot.get(1).getBoostStatisticsDocument().getTotalCount()).isEqualTo(0); ++ assertThat(snapshot.get(1).getBoostStatisticsDocument().getAnomalyCount()).isEqualTo(0); ++ assertThat(snapshot.get(1).getBoostStatisticsDocument().getSampledAnomalyCount()).isEqualTo(0); ++ ++ // Assert the trace ID cache is filled with appropriate data and is cleared after TTL passes ++ assertThat(sampler.getTraceUsageCache().asMap().size()).isEqualTo(4); ++ clock.advance(Duration.ofMinutes(100)); ++ assertThat(sampler.getTraceUsageCache().asMap().size()).isEqualTo(0); ++ } ++ ++ @Test ++ void setAdaptiveSamplingConfigTwice() { ++ SamplingRule rule1 = ++ SamplingRule.create( ++ Collections.emptyMap(), ++ 1.0, ++ "*", ++ "*", ++ 1, ++ 1, ++ "*", ++ "*", ++ "test-rule", ++ "*", ++ "*", ++ "*", ++ 1, ++ null); ++ ++ TestClock clock = TestClock.create(); ++ XrayRulesSampler sampler = ++ new XrayRulesSampler( ++ "CLIENT_ID", ++ Resource.getDefault(), ++ clock, ++ Sampler.alwaysOn(), ++ Arrays.asList(rule1), ++ null); ++ ++ AwsXrayAdaptiveSamplingConfig config = ++ AwsXrayAdaptiveSamplingConfig.builder().setVersion(1.0).build(); ++ sampler.setAdaptiveSamplingConfig(config); ++ assertThrows(IllegalStateException.class, () -> sampler.setAdaptiveSamplingConfig(config)); ++ } ++ ++ @Test ++ void captureErrorBasedOnErrorCodeRegex() { ++ SamplingRule rule1 = ++ SamplingRule.create( ++ Collections.emptyMap(), ++ 0.0, ++ "*", ++ "*", ++ 1, ++ 0, ++ "*", ++ "*", ++ "test-rule", ++ "*", ++ "*", ++ "*", ++ 1, ++ SamplingRateBoost.create(1, 300)); ++ ++ TestClock clock = TestClock.create(); ++ AwsXrayAdaptiveSamplingConfig config = ++ AwsXrayAdaptiveSamplingConfig.builder() ++ .setVersion(1.0) ++ .setAnomalyCaptureLimit( ++ AwsXrayAdaptiveSamplingConfig.AnomalyCaptureLimit.builder() ++ 
.setAnomalyTracesPerSecond(2) ++ .build()) ++ .setAnomalyConditions( ++ Arrays.asList( ++ AwsXrayAdaptiveSamplingConfig.AnomalyConditions.builder() ++ .setErrorCodeRegex("^456$") ++ .setUsage(AwsXrayAdaptiveSamplingConfig.UsageType.BOTH) ++ .build())) ++ .build(); ++ XrayRulesSampler sampler = ++ new XrayRulesSampler( ++ "CLIENT_ID", ++ Resource.getDefault(), ++ clock, ++ Sampler.alwaysOn(), ++ Arrays.asList(rule1), ++ config); ++ ++ ReadableSpan readableSpanMock = mock(ReadableSpan.class); ++ when(readableSpanMock.getSpanContext()) ++ .thenReturn( ++ SpanContext.create( ++ "TRACE_ID", "SPAN_ID", TraceFlags.getDefault(), TraceState.getDefault())); ++ when(readableSpanMock.getAttribute(any())).thenReturn("test-operation"); ++ when(readableSpanMock.getLatencyNanos()).thenReturn(1L); ++ ++ SpanData spanDataMock = mock(SpanData.class); ++ Attributes attributesMock = mock(Attributes.class); ++ when(spanDataMock.getAttributes()).thenReturn(attributesMock); ++ when(attributesMock.get(HTTP_RESPONSE_STATUS_CODE)).thenReturn(456L); ++ ++ LongAdder exportCounter = new LongAdder(); ++ Consumer stubbedConsumer = x -> exportCounter.increment(); ++ ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID1"); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID2"); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID3"); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ assertThat(exportCounter.sum()).isEqualTo(2L); ++ } ++ ++ @Test ++ void captureErrorBasedOnHighLatency() { ++ SamplingRule rule1 = ++ SamplingRule.create( ++ Collections.emptyMap(), ++ 0.0, ++ "*", ++ "*", ++ 1, ++ 0, ++ "*", ++ "*", ++ "test-rule", ++ "*", ++ "*", ++ "*", ++ 1, ++ SamplingRateBoost.create(1, 300)); ++ ++ TestClock clock = TestClock.create(); ++ AwsXrayAdaptiveSamplingConfig config = ++ 
AwsXrayAdaptiveSamplingConfig.builder() ++ .setVersion(1.0) ++ .setAnomalyCaptureLimit( ++ AwsXrayAdaptiveSamplingConfig.AnomalyCaptureLimit.builder() ++ .setAnomalyTracesPerSecond(2) ++ .build()) ++ .setAnomalyConditions( ++ Arrays.asList( ++ AwsXrayAdaptiveSamplingConfig.AnomalyConditions.builder() ++ .setHighLatencyMs(100L) ++ .setUsage(AwsXrayAdaptiveSamplingConfig.UsageType.ANOMALY_TRACE_CAPTURE) ++ .build())) ++ .build(); ++ XrayRulesSampler sampler = ++ new XrayRulesSampler( ++ "CLIENT_ID", ++ Resource.getDefault(), ++ clock, ++ Sampler.alwaysOn(), ++ Arrays.asList(rule1), ++ config); ++ ++ ReadableSpan readableSpanMock = mock(ReadableSpan.class); ++ when(readableSpanMock.getSpanContext()) ++ .thenReturn( ++ SpanContext.create( ++ "TRACE_ID", "SPAN_ID", TraceFlags.getDefault(), TraceState.getDefault())); ++ when(readableSpanMock.getAttribute(any())).thenReturn("test-operation"); ++ when(readableSpanMock.getLatencyNanos()).thenReturn(300_000_000L); // 300 ms ++ ++ SpanData spanDataMock = mock(SpanData.class); ++ Attributes attributesMock = mock(Attributes.class); ++ when(spanDataMock.getAttributes()).thenReturn(attributesMock); ++ when(attributesMock.get(HTTP_RESPONSE_STATUS_CODE)).thenReturn(200L); ++ ++ LongAdder exportCounter = new LongAdder(); ++ Consumer stubbedConsumer = x -> exportCounter.add(1); ++ ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID1"); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID2"); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID3"); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ assertThat(exportCounter.sum()).isEqualTo(2L); ++ } ++ ++ @Test ++ void captureErrorBasedOnErroCodeAndLatency() { ++ SamplingRule rule1 = ++ SamplingRule.create( ++ Collections.emptyMap(), ++ 0.0, ++ "*", ++ "*", ++ 1, ++ 0, ++ "*", ++ "*", ++ "test-rule", 
++ "*", ++ "*", ++ "*", ++ 1, ++ SamplingRateBoost.create(1, 300)); ++ ++ TestClock clock = TestClock.create(); ++ AwsXrayAdaptiveSamplingConfig config = ++ AwsXrayAdaptiveSamplingConfig.builder() ++ .setVersion(1.0) ++ .setAnomalyCaptureLimit( ++ AwsXrayAdaptiveSamplingConfig.AnomalyCaptureLimit.builder() ++ .setAnomalyTracesPerSecond(2) ++ .build()) ++ .setAnomalyConditions( ++ Arrays.asList( ++ AwsXrayAdaptiveSamplingConfig.AnomalyConditions.builder() ++ .setErrorCodeRegex("^456$") ++ .setHighLatencyMs(100L) ++ .setUsage(AwsXrayAdaptiveSamplingConfig.UsageType.ANOMALY_TRACE_CAPTURE) ++ .build())) ++ .build(); ++ XrayRulesSampler sampler = ++ new XrayRulesSampler( ++ "CLIENT_ID", ++ Resource.getDefault(), ++ clock, ++ Sampler.alwaysOn(), ++ Arrays.asList(rule1), ++ config); ++ ++ ReadableSpan readableSpanMock = mock(ReadableSpan.class); ++ when(readableSpanMock.getSpanContext()) ++ .thenReturn( ++ SpanContext.create( ++ "TRACE_ID", "SPAN_ID", TraceFlags.getDefault(), TraceState.getDefault())); ++ when(readableSpanMock.getAttribute(any())).thenReturn("test-operation"); ++ when(readableSpanMock.getLatencyNanos()).thenReturn(300_000_000L); // 300 ms ++ ++ SpanData spanDataMock = mock(SpanData.class); ++ Attributes attributesMock = mock(Attributes.class); ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID"); ++ when(spanDataMock.getAttributes()).thenReturn(attributesMock); ++ when(attributesMock.get(HTTP_RESPONSE_STATUS_CODE)).thenReturn(200L); ++ ++ LongAdder exportCounter = new LongAdder(); ++ Consumer stubbedConsumer = x -> exportCounter.add(1); ++ ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID1"); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ assertThat(exportCounter.sum()).isEqualTo(0L); ++ ++ when(attributesMock.get(HTTP_RESPONSE_STATUS_CODE)).thenReturn(456L); ++ when(readableSpanMock.getLatencyNanos()).thenReturn(1L); ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID2"); ++ 
sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ assertThat(exportCounter.sum()).isEqualTo(0L); ++ ++ when(attributesMock.get(HTTP_RESPONSE_STATUS_CODE)).thenReturn(456L); ++ when(readableSpanMock.getLatencyNanos()).thenReturn(300_000_000L); // 300 ms ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID3"); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID4"); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID5"); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ assertThat(exportCounter.sum()).isEqualTo(2L); ++ } ++ ++ @Test ++ void recordAndCaptureErrorBasedOnSeparateConditions() { ++ SamplingRule rule1 = ++ SamplingRule.create( ++ Collections.emptyMap(), ++ 0.0, ++ "*", ++ "*", ++ 1, ++ 0, ++ "*", ++ "*", ++ "test-rule", ++ "*", ++ "*", ++ "*", ++ 1, ++ SamplingRateBoost.create(1, 300)); ++ ++ TestClock clock = TestClock.create(); ++ AwsXrayAdaptiveSamplingConfig config = ++ AwsXrayAdaptiveSamplingConfig.builder() ++ .setVersion(1.0) ++ .setAnomalyCaptureLimit( ++ AwsXrayAdaptiveSamplingConfig.AnomalyCaptureLimit.builder() ++ .setAnomalyTracesPerSecond(10) ++ .build()) ++ .setAnomalyConditions( ++ Arrays.asList( ++ AwsXrayAdaptiveSamplingConfig.AnomalyConditions.builder() ++ .setErrorCodeRegex("^5\\d\\d$") ++ .setUsage(AwsXrayAdaptiveSamplingConfig.UsageType.SAMPLING_BOOST) ++ .build(), ++ AwsXrayAdaptiveSamplingConfig.AnomalyConditions.builder() ++ .setErrorCodeRegex("^4\\d\\d$") ++ .setUsage(AwsXrayAdaptiveSamplingConfig.UsageType.ANOMALY_TRACE_CAPTURE) ++ .build())) ++ .build(); ++ XrayRulesSampler sampler = ++ new XrayRulesSampler( ++ "CLIENT_ID", ++ Resource.getDefault(), ++ clock, ++ Sampler.alwaysOn(), ++ Arrays.asList(rule1), ++ config); ++ ++ ReadableSpan readableSpanMock = mock(ReadableSpan.class); ++ 
when(readableSpanMock.getSpanContext()) ++ .thenReturn( ++ SpanContext.create( ++ "TRACE_ID", "SPAN_ID", TraceFlags.getDefault(), TraceState.getDefault())); ++ ++ SpanData spanDataMock = mock(SpanData.class); ++ Attributes attributesMock = mock(Attributes.class); ++ when(spanDataMock.getAttributes()).thenReturn(attributesMock); ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID"); ++ LongAdder exportCounter = new LongAdder(); ++ Consumer stubbedConsumer = x -> exportCounter.add(1); ++ ++ // Boost condition triggered - count new trace + count anomaly ++ when(attributesMock.get(HTTP_RESPONSE_STATUS_CODE)).thenReturn(511L); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ assertThat(sampler.getTraceUsageCache().getIfPresent("TRACE_ID")) ++ .isEqualTo(AwsXrayAdaptiveSamplingConfig.UsageType.SAMPLING_BOOST); ++ assertThat(exportCounter.sumThenReset()).isEqualTo(0L); ++ ++ // Anomaly capture triggered - capture and update cache value ++ when(attributesMock.get(HTTP_RESPONSE_STATUS_CODE)).thenReturn(411L); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ assertThat(sampler.getTraceUsageCache().getIfPresent("TRACE_ID")) ++ .isEqualTo(AwsXrayAdaptiveSamplingConfig.UsageType.BOTH); ++ assertThat(exportCounter.sumThenReset()).isEqualTo(1L); ++ ++ // Boost condition triggered - capture span even though anomaly capture not included ++ when(attributesMock.get(HTTP_RESPONSE_STATUS_CODE)).thenReturn(511L); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ assertThat(sampler.getTraceUsageCache().getIfPresent("TRACE_ID")) ++ .isEqualTo(AwsXrayAdaptiveSamplingConfig.UsageType.BOTH); ++ assertThat(exportCounter.sumThenReset()).isEqualTo(1L); ++ ++ // Non-anomaly span - should still be captured since trace is anomalous overall ++ when(attributesMock.get(HTTP_RESPONSE_STATUS_CODE)).thenReturn(200L); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ 
assertThat(sampler.getTraceUsageCache().getIfPresent("TRACE_ID")) ++ .isEqualTo(AwsXrayAdaptiveSamplingConfig.UsageType.BOTH); ++ assertThat(exportCounter.sumThenReset()).isEqualTo(1L); ++ } ++ ++ @Test ++ void operationFilteringInAdaptSampling() { ++ SamplingRule rule1 = ++ SamplingRule.create( ++ Collections.emptyMap(), ++ 0.0, ++ "*", ++ "*", ++ 1, ++ 0, ++ "*", ++ "*", ++ "test-rule", ++ "*", ++ "*", ++ "*", ++ 1, ++ SamplingRateBoost.create(1, 300)); ++ ++ TestClock clock = TestClock.create(); ++ // Error span capture should default to 1/s ++ AwsXrayAdaptiveSamplingConfig config = ++ AwsXrayAdaptiveSamplingConfig.builder() ++ .setVersion(1.0) ++ .setAnomalyConditions( ++ Arrays.asList( ++ AwsXrayAdaptiveSamplingConfig.AnomalyConditions.builder() ++ .setOperations(Arrays.asList("GET /api1", "GET /api2")) ++ .setErrorCodeRegex("^500$") ++ .setUsage(AwsXrayAdaptiveSamplingConfig.UsageType.ANOMALY_TRACE_CAPTURE) ++ .build())) ++ .build(); ++ XrayRulesSampler sampler = ++ new XrayRulesSampler( ++ "CLIENT_ID", ++ Resource.getDefault(), ++ clock, ++ Sampler.alwaysOn(), ++ Arrays.asList(rule1), ++ config); ++ ++ ReadableSpan readableSpanMock = mock(ReadableSpan.class); ++ when(readableSpanMock.getSpanContext()) ++ .thenReturn( ++ SpanContext.create( ++ "TRACE_ID", "SPAN_ID", TraceFlags.getDefault(), TraceState.getDefault())); ++ when(readableSpanMock.getLatencyNanos()).thenReturn(1L); ++ ++ SpanData spanDataMock = mock(SpanData.class); ++ Attributes attributesMock = mock(Attributes.class); ++ when(spanDataMock.getAttributes()).thenReturn(attributesMock); ++ when(attributesMock.get(HTTP_RESPONSE_STATUS_CODE)).thenReturn(500L); ++ ++ LongAdder exportCounter = new LongAdder(); ++ Consumer stubbedConsumer = x -> exportCounter.increment(); ++ ++ // Test matching operations ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID1"); ++ when(attributesMock.get(URL_PATH)).thenReturn("/api1/ext"); ++ when(attributesMock.get(HTTP_METHOD)).thenReturn("GET"); ++ 
sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ ++ clock.advance(Duration.ofSeconds(5)); ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID2"); ++ when(attributesMock.get(URL_PATH)).thenReturn("/api2"); ++ when(attributesMock.get(HTTP_METHOD)).thenReturn("GET"); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ assertThat(exportCounter.sumThenReset()).isEqualTo(2L); ++ ++ // Not enough time elapsed, error rate limit was hit ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID3"); ++ when(attributesMock.get(URL_PATH)).thenReturn("/api2"); ++ when(attributesMock.get(HTTP_METHOD)).thenReturn("GET"); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ assertThat(exportCounter.sumThenReset()).isEqualTo(0L); ++ ++ // Test non-matching operation ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID4"); ++ when(attributesMock.get(URL_PATH)).thenReturn("/api1/ext"); ++ when(attributesMock.get(HTTP_METHOD)).thenReturn("POST"); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID5"); ++ when(attributesMock.get(URL_PATH)).thenReturn("/non-matching"); ++ when(attributesMock.get(HTTP_METHOD)).thenReturn("GET"); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ assertThat(exportCounter.sumThenReset()).isEqualTo(0L); ++ ++ // Test aws.local.operation takes priority ++ clock.advance(Duration.ofSeconds(5)); ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID6"); ++ when(attributesMock.get(AwsAttributeKeys.AWS_LOCAL_OPERATION)).thenReturn("GET /api1"); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ assertThat(exportCounter.sumThenReset()).isEqualTo(1L); ++ ++ // Test sending previously matched traceIDs gets captured ++ clock.advance(Duration.ofSeconds(5)); ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID1"); ++ 
when(attributesMock.get(AwsAttributeKeys.AWS_LOCAL_OPERATION)).thenReturn("GET /non-matching"); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ when(spanDataMock.getTraceId()).thenReturn("TRACE_ID2"); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ assertThat(exportCounter.sumThenReset()).isEqualTo(2L); ++ ++ // Test sending previously matched traceIDs gets captured as long as trace is active ++ clock.advance(Duration.ofSeconds(45)); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ assertThat(exportCounter.sumThenReset()).isEqualTo(1L); ++ clock.advance(Duration.ofSeconds(45)); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ assertThat(exportCounter.sumThenReset()).isEqualTo(1L); ++ clock.advance(Duration.ofSeconds(45)); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ assertThat(exportCounter.sumThenReset()).isEqualTo(1L); ++ ++ // Test sending non-matching trace after expire-time elapses ++ clock.advance(Duration.ofMinutes(100)); ++ sampler.adaptSampling(readableSpanMock, spanDataMock, stubbedConsumer); ++ assertThat(exportCounter.sumThenReset()).isEqualTo(0L); ++ } ++ + private static SamplingResult doSample(Sampler sampler, String name) { + return sampler.shouldSample( + Context.current(), +diff --git a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/XraySamplerClientTest.java b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/XraySamplerClientTest.java +index 283e3b3c..cf0cb072 100644 +--- a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/XraySamplerClientTest.java ++++ b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/XraySamplerClientTest.java +@@ -126,7 +126,8 @@ class XraySamplerClientTest { + .setRequestCount(10500) + .setSampledCount(31) + .setBorrowCount(0) +- .build())); ++ .build()), ++ Collections.emptyList()); + GetSamplingTargetsResponse response = 
client.getSamplingTargets(samplingTargetsRequest); + + AggregatedHttpRequest request = server.takeRequest().request(); +@@ -174,7 +175,8 @@ class XraySamplerClientTest { + assertThatThrownBy( + () -> + client.getSamplingTargets( +- GetSamplingTargetsRequest.create(Collections.emptyList()))) ++ GetSamplingTargetsRequest.create( ++ Collections.emptyList(), Collections.emptyList()))) + .isInstanceOf(UncheckedIOException.class) + .hasMessage("Failed to deserialize response."); + } +diff --git a/aws-xray/src/test/resources/sampling-rule-boost.json b/aws-xray/src/test/resources/sampling-rule-boost.json +new file mode 100644 +index 00000000..32752d5e +--- /dev/null ++++ b/aws-xray/src/test/resources/sampling-rule-boost.json +@@ -0,0 +1,22 @@ ++{ ++ "RuleName": "Test", ++ "RuleARN": "arn:aws:xray:us-east-1:595986152929:sampling-rule/Test", ++ "ResourceARN": "arn:aws:xray:us-east-1:595986152929:my-service", ++ "Priority": 1, ++ "FixedRate": 0.0, ++ "ReservoirSize": 0, ++ "ServiceName": "*", ++ "ServiceType": "*", ++ "Host": "*", ++ "HTTPMethod": "*", ++ "URLPath": "*", ++ "Version": 1, ++ "SamplingRateBoost": { ++ "MaxRate": 0.2, ++ "CooldownWindowMinutes": 300 ++ }, ++ "Attributes": { ++ "animal": "cat", ++ "speed": "0" ++ } ++} +diff --git a/disk-buffering/build.gradle.kts b/disk-buffering/build.gradle.kts +index 8250c1bd..74a1a24c 100644 +--- a/disk-buffering/build.gradle.kts ++++ b/disk-buffering/build.gradle.kts +@@ -77,6 +77,10 @@ tasks.named("shadowJar") { + mustRunAfter("jar") + } + ++tasks.withType().configureEach { ++ dependsOn("shadowJar") ++} ++ + // The javadoc from wire's generated classes has errors that make the task that generates the "javadoc" artifact to fail. This + // makes the javadoc task to ignore those generated classes. 
+ tasks.withType(Javadoc::class.java) { +diff --git a/jmx-metrics/src/integrationTest/java/io/opentelemetry/contrib/jmxmetrics/target_systems/KafkaIntegrationTest.java b/jmx-metrics/src/integrationTest/java/io/opentelemetry/contrib/jmxmetrics/target_systems/KafkaIntegrationTest.java +index 4c2c9293..4dddd975 100644 +--- a/jmx-metrics/src/integrationTest/java/io/opentelemetry/contrib/jmxmetrics/target_systems/KafkaIntegrationTest.java ++++ b/jmx-metrics/src/integrationTest/java/io/opentelemetry/contrib/jmxmetrics/target_systems/KafkaIntegrationTest.java +@@ -44,7 +44,7 @@ abstract class KafkaIntegrationTest extends AbstractIntegrationTest { + + @Container + GenericContainer kafka = +- new GenericContainer<>("bitnami/kafka:2.8.1") ++ new GenericContainer<>("bitnamilegacy/kafka:2.8.1") + .withNetwork(Network.SHARED) + .withEnv("KAFKA_CFG_ZOOKEEPER_CONNECT", "zookeeper:2181") + .withEnv("ALLOW_PLAINTEXT_LISTENER", "yes") +@@ -80,7 +80,7 @@ abstract class KafkaIntegrationTest extends AbstractIntegrationTest { + }; + + protected GenericContainer kafkaProducerContainer() { +- return new GenericContainer<>("bitnami/kafka:2.8.1") ++ return new GenericContainer<>("bitnamilegacy/kafka:2.8.1") + .withNetwork(Network.SHARED) + .withEnv("KAFKA_CFG_ZOOKEEPER_CONNECT", "zookeeper:2181") + .withEnv("ALLOW_PLAINTEXT_LISTENER", "yes") +@@ -207,7 +207,7 @@ abstract class KafkaIntegrationTest extends AbstractIntegrationTest { + + @Container + GenericContainer consumer = +- new GenericContainer<>("bitnami/kafka:2.8.1") ++ new GenericContainer<>("bitnamilegacy/kafka:2.8.1") + .withNetwork(Network.SHARED) + .withEnv("KAFKA_CFG_ZOOKEEPER_CONNECT", "zookeeper:2181") + .withEnv("ALLOW_PLAINTEXT_LISTENER", "yes") +diff --git a/jmx-scraper/src/integrationTest/java/io/opentelemetry/contrib/jmxscraper/target_systems/kafka/KafkaContainerFactory.java b/jmx-scraper/src/integrationTest/java/io/opentelemetry/contrib/jmxscraper/target_systems/kafka/KafkaContainerFactory.java +index 8eb9432a..e46ed07b 
100644 +--- a/jmx-scraper/src/integrationTest/java/io/opentelemetry/contrib/jmxscraper/target_systems/kafka/KafkaContainerFactory.java ++++ b/jmx-scraper/src/integrationTest/java/io/opentelemetry/contrib/jmxscraper/target_systems/kafka/KafkaContainerFactory.java +@@ -12,7 +12,7 @@ import org.testcontainers.containers.wait.strategy.Wait; + public class KafkaContainerFactory { + private static final int KAFKA_PORT = 9092; + private static final String KAFKA_BROKER = "kafka:" + KAFKA_PORT; +- private static final String KAFKA_DOCKER_IMAGE = "bitnami/kafka:2.8.1"; ++ private static final String KAFKA_DOCKER_IMAGE = "bitnamilegacy/kafka:2.8.1"; + + private KafkaContainerFactory() {} + +diff --git a/opamp-client/build.gradle.kts b/opamp-client/build.gradle.kts +index e41d1fff..84a1d559 100644 +--- a/opamp-client/build.gradle.kts ++++ b/opamp-client/build.gradle.kts +@@ -1,6 +1,4 @@ + import de.undercouch.gradle.tasks.download.DownloadExtension +-import java.net.HttpURLConnection +-import java.net.URL + + plugins { + id("otel.java-conventions") +@@ -50,19 +48,7 @@ abstract class DownloadOpampProtos @Inject constructor( + + @TaskAction + fun execute() { +- // Get the latest release tag by following the redirect from GitHub's latest release URL +- val latestReleaseUrl = "https://github.com/open-telemetry/opamp-spec/releases/latest" +- val connection = URL(latestReleaseUrl).openConnection() as HttpURLConnection +- connection.instanceFollowRedirects = false +- connection.requestMethod = "HEAD" +- +- val redirectLocation = connection.getHeaderField("Location") +- connection.disconnect() +- +- // Extract tag from URL like: https://github.com/open-telemetry/opamp-spec/releases/tag/v0.12.0 +- val latestTag = redirectLocation.substringAfterLast("/") +- // Download the source code for the latest release +- val zipUrl = "https://github.com/open-telemetry/opamp-spec/zipball/$latestTag" ++ val zipUrl = "https://github.com/open-telemetry/opamp-spec/zipball/v0.14.0" + + download.run { + 
src(zipUrl) +diff --git a/version.gradle.kts b/version.gradle.kts +index f8358006..1f7c517f 100644 +--- a/version.gradle.kts ++++ b/version.gradle.kts +@@ -1,5 +1,5 @@ +-val stableVersion = "1.48.0" +-val alphaVersion = "1.48.0-alpha" ++val stableVersion = "1.48.0-adot1" ++val alphaVersion = "1.48.0-alpha-adot1" + + allprojects { + if (findProperty("otel.stable") != "true") { +-- +2.45.1 + diff --git a/.github/patches/versions b/.github/patches/versions index 03f4b3f51f..b1b741c455 100644 --- a/.github/patches/versions +++ b/.github/patches/versions @@ -1 +1,2 @@ OTEL_JAVA_INSTRUMENTATION_VERSION=v2.18.1 +OTEL_JAVA_CONTRIB_VERSION=v1.48.0 \ No newline at end of file diff --git a/awsagentprovider/build.gradle.kts b/awsagentprovider/build.gradle.kts index 3aeb79f935..5cc97b14fc 100644 --- a/awsagentprovider/build.gradle.kts +++ b/awsagentprovider/build.gradle.kts @@ -38,8 +38,10 @@ dependencies { implementation("io.opentelemetry.contrib:opentelemetry-aws-xray") // AWS Resource Detectors implementation("io.opentelemetry.contrib:opentelemetry-aws-resources") - // Json file reader + // JSON file reader implementation("com.fasterxml.jackson.core:jackson-databind:2.16.1") + // YAML file reader + implementation("com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.16.1") // Import AWS SDK v1 core for ARN parsing utilities implementation("com.amazonaws:aws-java-sdk-core:1.12.773") // Export configuration diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AttributePropagatingSpanProcessor.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AttributePropagatingSpanProcessor.java index 7b03ec068b..a5e59d0540 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AttributePropagatingSpanProcessor.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AttributePropagatingSpanProcessor.java @@ -112,6 +112,7 @@ public void 
onStart(Context parentContext, ReadWriteSpan span) { if (propagationData != null) { span.setAttribute(propagationDataKey, propagationData); } + span.setAttribute(AwsAttributeKeys.AWS_TRACE_FLAG_SAMPLED, span.getSpanContext().isSampled()); } private boolean isConsumerKind(ReadableSpan span) { diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsAgentPropertiesCustomizerProvider.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsAgentPropertiesCustomizerProvider.java index 073e345de0..4480092c19 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsAgentPropertiesCustomizerProvider.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsAgentPropertiesCustomizerProvider.java @@ -26,7 +26,7 @@ public void customize(AutoConfigurationCustomizer autoConfiguration) { () -> new HashMap() { { - put("otel.propagators", "baggage,xray,tracecontext,b3,b3multi"); + put("otel.propagators", "baggage,xray,tracecontext"); put("otel.instrumentation.aws-sdk.experimental-span-attributes", "true"); put( "otel.instrumentation.aws-sdk.experimental-record-individual-http-error", diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsApplicationSignalsCustomizerProvider.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsApplicationSignalsCustomizerProvider.java index 13cb4ddd81..1073b52bc3 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsApplicationSignalsCustomizerProvider.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsApplicationSignalsCustomizerProvider.java @@ -15,9 +15,14 @@ package software.amazon.opentelemetry.javaagent.providers; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.type.TypeReference; 
+import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; -import io.opentelemetry.contrib.awsxray.AlwaysRecordSampler; +import io.opentelemetry.contrib.awsxray.AwsXrayAdaptiveSamplingConfig; +import io.opentelemetry.contrib.awsxray.AwsXrayRemoteSampler; import io.opentelemetry.contrib.awsxray.ResourceHolder; import io.opentelemetry.exporter.otlp.http.logs.OtlpHttpLogRecordExporter; import io.opentelemetry.exporter.otlp.http.metrics.OtlpHttpMetricExporter; @@ -42,6 +47,11 @@ import io.opentelemetry.sdk.trace.SpanProcessor; import io.opentelemetry.sdk.trace.export.SpanExporter; import io.opentelemetry.sdk.trace.samplers.Sampler; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; @@ -142,11 +152,16 @@ public final class AwsApplicationSignalsCustomizerProvider private static final String OTEL_EXPORTER_OTLP_LOGS_COMPRESSION_CONFIG = "otel.exporter.otlp.logs.compression"; + private static final String AWS_XRAY_ADAPTIVE_SAMPLING_CONFIG = + "aws.xray.adaptive.sampling.config"; + // UDP packet can be upto 64KB. To limit the packet size, we limit the exported batch size. // This is a bit of a magic number, as there is no simple way to tell how many spans can make a // 64KB batch since spans can vary in size. 
private static final int LAMBDA_SPAN_EXPORT_BATCH_SIZE = 10; + private Sampler sampler; + public void customize(AutoConfigurationCustomizer autoConfiguration) { autoConfiguration.addPropertiesCustomizer(this::customizeProperties); autoConfiguration.addPropertiesCustomizer(this::customizeLambdaEnvProperties); @@ -281,6 +296,27 @@ private Resource customizeResource(Resource resource, ConfigProperties configPro } private Sampler customizeSampler(Sampler sampler, ConfigProperties configProps) { + if (sampler instanceof AwsXrayRemoteSampler) { + String config = configProps.getString(AWS_XRAY_ADAPTIVE_SAMPLING_CONFIG); + AwsXrayAdaptiveSamplingConfig parsedConfig = null; + + try { + parsedConfig = parseConfigString(config); + } catch (Exception e) { + logger.log( + Level.WARNING, "Failed to parse adaptive sampling configuration: {0}", e.getMessage()); + } + + if (parsedConfig != null) { + try { + ((AwsXrayRemoteSampler) sampler).setAdaptiveSamplingConfig(parsedConfig); + } catch (Exception e) { + logger.log( + Level.WARNING, "Error processing adaptive sampling config: {0}", e.getMessage()); + } + } + this.sampler = sampler; + } if (isApplicationSignalsEnabled(configProps)) { return AlwaysRecordSampler.create(sampler); } @@ -344,10 +380,13 @@ private SdkTracerProviderBuilder customizeTracerProviderBuilder( .build(); // Construct and set application signals metrics processor - SpanProcessor spanMetricsProcessor = + AwsSpanMetricsProcessorBuilder awsSpanMetricsProcessorBuilder = AwsSpanMetricsProcessorBuilder.create( - meterProvider, ResourceHolder.getResource(), meterProvider::forceFlush) - .build(); + meterProvider, ResourceHolder.getResource(), meterProvider::forceFlush); + if (this.sampler != null) { + awsSpanMetricsProcessorBuilder.setSampler(this.sampler); + } + SpanProcessor spanMetricsProcessor = awsSpanMetricsProcessorBuilder.build(); tracerProviderBuilder.addSpanProcessor(spanMetricsProcessor); } return tracerProviderBuilder; @@ -423,11 +462,14 @@ SpanExporter 
customizeSpanExporter(SpanExporter spanExporter, ConfigProperties c } if (isApplicationSignalsEnabled(configProps)) { - return AwsMetricAttributesSpanExporterBuilder.create( - spanExporter, ResourceHolder.getResource()) - .build(); + spanExporter = + AwsMetricAttributesSpanExporterBuilder.create(spanExporter, ResourceHolder.getResource()) + .build(); } + if (this.sampler instanceof AwsXrayRemoteSampler) { + ((AwsXrayRemoteSampler) this.sampler).setSpanExporter(spanExporter); + } return spanExporter; } @@ -467,6 +509,44 @@ LogRecordExporter customizeLogsExporter( return logsExporter; } + static AwsXrayAdaptiveSamplingConfig parseConfigString(String config) + throws JsonProcessingException { + if (config == null) { + return null; + } + + // Check if the config is a file path and the file exists + Path path = Paths.get(config); + if (Files.exists(path)) { + try { + config = String.join("\n", Files.readAllLines(path, StandardCharsets.UTF_8)); + } catch (IOException e) { + throw new IllegalArgumentException( + "Failed to read adaptive sampling configuration file: " + e.getMessage(), e); + } + } + + ObjectMapper yamlMapper = new ObjectMapper(new YAMLFactory()); + Map configMap = + yamlMapper.readValue(config, new TypeReference>() {}); + + Object versionObj = configMap.get("version"); + if (versionObj == null) { + throw new IllegalArgumentException( + "Missing required 'version' field in adaptive sampling configuration"); + } + + double version = ((Number) versionObj).doubleValue(); + if (version >= 2L) { + throw new IllegalArgumentException( + "Incompatible adaptive sampling config version: " + + version + + ". 
This version of the AWS X-Ray remote sampler only supports versions strictly below 2.0."); + } + + return yamlMapper.readValue(config, AwsXrayAdaptiveSamplingConfig.class); + } + private enum ApplicationSignalsExporterProvider { INSTANCE; diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessor.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessor.java index 37436f5d3b..9dabe2c3fb 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessor.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessor.java @@ -25,12 +25,14 @@ import io.opentelemetry.api.metrics.LongHistogram; import io.opentelemetry.api.trace.StatusCode; import io.opentelemetry.context.Context; +import io.opentelemetry.contrib.awsxray.AwsXrayRemoteSampler; import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.trace.ReadWriteSpan; import io.opentelemetry.sdk.trace.ReadableSpan; import io.opentelemetry.sdk.trace.SpanProcessor; import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.samplers.Sampler; import java.util.Map; import java.util.function.Supplier; import javax.annotation.concurrent.Immutable; @@ -75,6 +77,8 @@ public final class AwsSpanMetricsProcessor implements SpanProcessor { private final Resource resource; private final Supplier forceFlushAction; + private Sampler sampler; + /** Use {@link AwsSpanMetricsProcessorBuilder} to construct this processor. 
*/ static AwsSpanMetricsProcessor create( LongHistogram errorHistogram, @@ -82,9 +86,16 @@ static AwsSpanMetricsProcessor create( DoubleHistogram latencyHistogram, MetricAttributeGenerator generator, Resource resource, + Sampler sampler, Supplier forceFlushAction) { return new AwsSpanMetricsProcessor( - errorHistogram, faultHistogram, latencyHistogram, generator, resource, forceFlushAction); + errorHistogram, + faultHistogram, + latencyHistogram, + generator, + resource, + sampler, + forceFlushAction); } private AwsSpanMetricsProcessor( @@ -93,12 +104,14 @@ private AwsSpanMetricsProcessor( DoubleHistogram latencyHistogram, MetricAttributeGenerator generator, Resource resource, + Sampler sampler, Supplier forceFlushAction) { this.errorHistogram = errorHistogram; this.faultHistogram = faultHistogram; this.latencyHistogram = latencyHistogram; this.generator = generator; this.resource = resource; + this.sampler = sampler; this.forceFlushAction = forceFlushAction; } @@ -125,6 +138,9 @@ public void onEnd(ReadableSpan span) { for (Map.Entry attribute : attributeMap.entrySet()) { recordMetrics(span, spanData, attribute.getValue()); } + if (sampler != null) { + ((AwsXrayRemoteSampler) sampler).adaptSampling(span, spanData); + } } @Override diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessorBuilder.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessorBuilder.java index 25ae0bd46e..e808543783 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessorBuilder.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessorBuilder.java @@ -24,6 +24,7 @@ import io.opentelemetry.api.metrics.MeterProvider; import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.resources.Resource; +import 
io.opentelemetry.sdk.trace.samplers.Sampler; import java.util.function.Supplier; /** A builder for {@link AwsSpanMetricsProcessor} */ @@ -51,6 +52,7 @@ public final class AwsSpanMetricsProcessorBuilder { // Optional builder elements private MetricAttributeGenerator generator = DEFAULT_GENERATOR; + private Sampler sampler; private String scopeName = DEFAULT_SCOPE_NAME; public static AwsSpanMetricsProcessorBuilder create( @@ -80,6 +82,17 @@ public AwsSpanMetricsProcessorBuilder setGenerator(MetricAttributeGenerator gene return this; } + /** + * Sets the sampler used to determine if the spans should be sampled This will be used to increase + * sampling rate in the case of errors + */ + @CanIgnoreReturnValue + public AwsSpanMetricsProcessorBuilder setSampler(Sampler sampler) { + requireNonNull(sampler, "sampler"); + this.sampler = sampler; + return this; + } + /** * Sets the scope name used in the creation of metrics by the span metrics processor. If unset, * defaults to {@link #DEFAULT_SCOPE_NAME}. Must not be null. @@ -99,6 +112,12 @@ public AwsSpanMetricsProcessor build() { meter.histogramBuilder(LATENCY).setUnit(LATENCY_UNITS).build(); return AwsSpanMetricsProcessor.create( - errorHistogram, faultHistogram, latencyHistogram, generator, resource, forceFlushAction); + errorHistogram, + faultHistogram, + latencyHistogram, + generator, + resource, + sampler, + forceFlushAction); } } diff --git a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsApplicationSignalsCustomizerProviderTest.java b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsApplicationSignalsCustomizerProviderTest.java new file mode 100644 index 0000000000..93d6a97f11 --- /dev/null +++ b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsApplicationSignalsCustomizerProviderTest.java @@ -0,0 +1,102 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +package software.amazon.opentelemetry.javaagent.providers; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatException; +import static org.assertj.core.api.Assertions.assertThatNoException; + +import com.fasterxml.jackson.core.JsonProcessingException; +import io.opentelemetry.contrib.awsxray.AwsXrayAdaptiveSamplingConfig; +import java.io.File; +import java.net.URISyntaxException; +import java.net.URL; +import org.junit.jupiter.api.Test; + +class AwsApplicationSignalsCustomizerProviderTest { + + @Test + void setAdaptiveSamplingConfigFromString_validConfig() throws JsonProcessingException { + assertThat(AwsApplicationSignalsCustomizerProvider.parseConfigString("version: 1").getVersion()) + .isEqualTo(1); + } + + @Test + void setAdaptiveSamplingConfigFromString_nullConfig() { + assertThatNoException() + .isThrownBy(() -> AwsApplicationSignalsCustomizerProvider.parseConfigString(null)); + } + + @Test + void setAdaptiveSamplingConfigFromString_missingVersion() { + assertThatException() + .isThrownBy(() -> AwsApplicationSignalsCustomizerProvider.parseConfigString("")); + } + + @Test + void setAdaptiveSamplingConfigFromString_unsupportedVersion() { + assertThatException() + .isThrownBy( + () -> AwsApplicationSignalsCustomizerProvider.parseConfigString("{version: 5000.1}")); + } + + @Test + void setAdaptiveSamplingConfigFromString_invalidYaml() { + assertThatException() + .isThrownBy( + () -> + 
AwsApplicationSignalsCustomizerProvider.parseConfigString( + "{version: 1, invalid: yaml: structure}")); + } + + @Test + void setAdaptiveSamplingConfigFromFile_validYaml() + throws JsonProcessingException, URISyntaxException { + // Get the resource file path + URL resourceUrl = + getClass().getClassLoader().getResource("adaptive-sampling-config-valid.yaml"); + assertThat(resourceUrl).isNotNull(); + + // Get the absolute file path + File configFile = new File(resourceUrl.toURI()); + String absolutePath = configFile.getAbsolutePath(); + + // Parse the config using the file path + AwsXrayAdaptiveSamplingConfig config = + AwsApplicationSignalsCustomizerProvider.parseConfigString(absolutePath); + + // Assert the configuration was parsed correctly + assertThat(config).isNotNull(); + assertThat(config.getVersion()).isEqualTo(1); + assertThat(config.getAnomalyCaptureLimit().getAnomalyTracesPerSecond()).isEqualTo(10); + } + + @Test + void setAdaptiveSamplingConfigFromFile_invalidYaml() throws URISyntaxException { + // Get the resource file path + URL resourceUrl = + getClass().getClassLoader().getResource("adaptive-sampling-config-invalid.yaml"); + assertThat(resourceUrl).isNotNull(); + + // Get the absolute file path + File configFile = new File(resourceUrl.toURI()); + String absolutePath = configFile.getAbsolutePath(); + + // Parse the config using the file path + assertThatException() + .isThrownBy(() -> AwsApplicationSignalsCustomizerProvider.parseConfigString(absolutePath)); + } +} diff --git a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessorTest.java b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessorTest.java index 28a628f526..ad436651c4 100644 --- a/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessorTest.java +++ 
b/awsagentprovider/src/test/java/software/amazon/opentelemetry/javaagent/providers/AwsSpanMetricsProcessorTest.java @@ -36,6 +36,7 @@ import io.opentelemetry.api.trace.SpanContext; import io.opentelemetry.api.trace.SpanKind; import io.opentelemetry.context.Context; +import io.opentelemetry.contrib.awsxray.AwsXrayRemoteSampler; import io.opentelemetry.sdk.common.CompletableResultCode; import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.resources.Resource; @@ -76,6 +77,7 @@ private enum ExpectedStatusMetric { private LongHistogram faultHistogramMock; private DoubleHistogram latencyHistogramMock; private MetricAttributeGenerator generatorMock; + private AwsXrayRemoteSampler samplerMock; private AwsSpanMetricsProcessor awsSpanMetricsProcessor; // Mock forceFlush function that returns success when invoked similar @@ -90,6 +92,7 @@ public void setUpMocks() { faultHistogramMock = mock(LongHistogram.class); latencyHistogramMock = mock(DoubleHistogram.class); generatorMock = mock(MetricAttributeGenerator.class); + samplerMock = mock(AwsXrayRemoteSampler.class); awsSpanMetricsProcessor = AwsSpanMetricsProcessor.create( @@ -98,6 +101,7 @@ public void setUpMocks() { latencyHistogramMock, generatorMock, testResource, + samplerMock, this::forceFlushAction); } diff --git a/awsagentprovider/src/test/resources/adaptive-sampling-config-invalid.yaml b/awsagentprovider/src/test/resources/adaptive-sampling-config-invalid.yaml new file mode 100644 index 0000000000..888ae7ee3e --- /dev/null +++ b/awsagentprovider/src/test/resources/adaptive-sampling-config-invalid.yaml @@ -0,0 +1,13 @@ +version: 1.0 +anomalyConditions: + - errorCodeRegex: "^5\\d\\d$" + usage: both + - errorCodeRegex: "^4\\d\\d$" + usage: both + - errorCodeRegex: "^3\\d\\d$" + usage: both + - errorCodeRegex: "^2\\d\\d$" + operations: invalid part of config + usage: both +anomalyCaptureLimit: + anomalyTracesPerSecond: 10 \ No newline at end of file diff --git 
a/awsagentprovider/src/test/resources/adaptive-sampling-config-valid.yaml b/awsagentprovider/src/test/resources/adaptive-sampling-config-valid.yaml new file mode 100644 index 0000000000..dcfd187628 --- /dev/null +++ b/awsagentprovider/src/test/resources/adaptive-sampling-config-valid.yaml @@ -0,0 +1,12 @@ +version: 1.0 +anomalyConditions: + - errorCodeRegex: "^5\\d\\d$" + usage: both + - errorCodeRegex: "^4\\d\\d$" + usage: both + - errorCodeRegex: "^3\\d\\d$" + usage: both + - errorCodeRegex: "^2\\d\\d$" + usage: both +anomalyCaptureLimit: + anomalyTracesPerSecond: 10 \ No newline at end of file diff --git a/dependencyManagement/build.gradle.kts b/dependencyManagement/build.gradle.kts index d186406009..b1f0bcf30b 100644 --- a/dependencyManagement/build.gradle.kts +++ b/dependencyManagement/build.gradle.kts @@ -76,7 +76,7 @@ val dependencyLists = listOf( "commons-logging:commons-logging:1.2", "com.sparkjava:spark-core:2.9.4", "com.squareup.okhttp3:okhttp:4.12.0", - "io.opentelemetry.contrib:opentelemetry-aws-xray:1.48.0", + "io.opentelemetry.contrib:opentelemetry-aws-xray:1.48.0-adot1", "io.opentelemetry.contrib:opentelemetry-aws-resources:1.48.0-alpha", "io.opentelemetry.proto:opentelemetry-proto:1.0.0-alpha", "io.opentelemetry.javaagent:opentelemetry-javaagent:$otelJavaAgentVersion", diff --git a/smoke-tests/runner/src/test/java/io/awsobservability/instrumentation/smoketests/runner/SpringBootSmokeTest.java b/smoke-tests/runner/src/test/java/io/awsobservability/instrumentation/smoketests/runner/SpringBootSmokeTest.java index f22b29cf03..2cc06551ee 100644 --- a/smoke-tests/runner/src/test/java/io/awsobservability/instrumentation/smoketests/runner/SpringBootSmokeTest.java +++ b/smoke-tests/runner/src/test/java/io/awsobservability/instrumentation/smoketests/runner/SpringBootSmokeTest.java @@ -165,11 +165,7 @@ void hello() { assertThat(response.status().isSuccess()).isTrue(); assertThat(response.headers()) .extracting(e -> e.getKey().toString()) - .contains( - 
"received-x-amzn-trace-id", - "received-b3", - "received-x-b3-traceid", - "received-traceparent"); + .contains("received-x-amzn-trace-id", "received-traceparent"); var exported = getExported(); assertThat(exported) From 7903dbf63f1fab1dd7d5d80c78720f0e7d15f5ce Mon Sep 17 00:00:00 2001 From: Min Xia Date: Wed, 3 Sep 2025 17:12:51 -0700 Subject: [PATCH 36/83] Update owasp.yml to scan 2.11.4 release (#1172) *Description of changes:* Update owasp.yml to scan 2.11.4 release. By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. Co-authored-by: $(git --no-pager log --format=format:'%an' -n 1) <$(git --no-pager log --format=format:'%ae' -n 1)> --- .github/workflows/owasp.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/owasp.yml b/.github/workflows/owasp.yml index 6a3dbd5949..7a85ff57d9 100644 --- a/.github/workflows/owasp.yml +++ b/.github/workflows/owasp.yml @@ -112,7 +112,7 @@ jobs: id: high_scan_v2 uses: ./.github/actions/image_scan with: - image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.3" + image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.4" severity: 'CRITICAL,HIGH' logout: 'false' @@ -121,7 +121,7 @@ jobs: id: low_scan_v2 uses: ./.github/actions/image_scan with: - image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.3" + image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.4" severity: 'MEDIUM,LOW,UNKNOWN' logout: 'false' From fb742d5d73e3e464ee1453dd3ec167cd36ed0f40 Mon Sep 17 00:00:00 2001 From: Eric Zhang Date: Fri, 5 Sep 2025 11:45:13 -0700 Subject: [PATCH 37/83] bump Netty to 4.1.126 (#1173) *Issue #, if available:* *Description of changes:* Fixes [CVE-2025-58056](https://github.com/advisories/GHSA-fghv-69vj-qj49). See upstream [PR](https://github.com/aws/aws-sdk-java-v2/pull/6398). 
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- dependencyManagement/build.gradle.kts | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/dependencyManagement/build.gradle.kts b/dependencyManagement/build.gradle.kts index b1f0bcf30b..d6218a08b2 100644 --- a/dependencyManagement/build.gradle.kts +++ b/dependencyManagement/build.gradle.kts @@ -40,9 +40,10 @@ val dependencyBoms = listOf( "com.google.protobuf:protobuf-bom:3.25.1", "com.linecorp.armeria:armeria-bom:1.26.4", "io.grpc:grpc-bom:1.59.1", - // netty-bom is a fix for CVE-2025-55163 (https://github.com/advisories/GHSA-prj3-ccx8-p6x4). - // Remove once https://github.com/aws/aws-sdk-java-v2/pull/6344 is released. - "io.netty:netty-bom:4.1.124.Final", + // netty-bom is a fix for CVE-2025-58056 (https://github.com/advisories/GHSA-fghv-69vj-qj49). + // Remove once https://github.com/aws/aws-sdk-java-v2/pull/6398 and https://github.com/aws/aws-sdk-java/pull/3192 + // are both merged and released, and we update the corresponding dependencies. + "io.netty:netty-bom:4.1.126.Final", "io.opentelemetry.instrumentation:opentelemetry-instrumentation-bom-alpha:$otelAlphaVersion", "org.apache.logging.log4j:log4j-bom:2.21.1", "org.junit:junit-bom:5.10.1", From 3b7c9a6c8ddceab6d814c885a79c408d6ec62102 Mon Sep 17 00:00:00 2001 From: Eric Zhang Date: Fri, 5 Sep 2025 13:19:05 -0700 Subject: [PATCH 38/83] add version.gradle.kts (#1122) Issue #, if available: Description of changes: (copied from #1121) The current version for ADOT SDK and Lambda Layer releases is only provided manually in the release workflow and auto-generated by the nebula release plugin based on previous tags, but never specified in the code. Adding a version file will provide explicit version management and make it easier to track upcoming releases. 
adotVersion is currently set to 2.11.1-dev0, following the convention used by our other ADOT projects ([Python](https://github.com/aws-observability/aws-otel-python-instrumentation/blob/main/aws-opentelemetry-distro/src/amazon/opentelemetry/distro/version.py), [.NET](https://github.com/aws-observability/aws-otel-dotnet-instrumentation/blob/main/src/AWS.Distro.OpenTelemetry.AutoInstrumentation/Version.cs), [NodeJS](https://github.com/aws-observability/aws-otel-js-instrumentation/blob/629faf5d1608e6fd803f9b2c2e15d8c26ff7b40a/package.json#L3)). The ADOT Lambda layer was previously given a version name after the upstream OpenTelemetry instrumentation. For example, for the most recent 2.11.1 release -- which still depends on OpenTelemetry Java Instrumentation 2.11.0 -- logs the following upon startup: ``` INFO io.opentelemetry.javaagent.tooling.VersionLogger - opentelemetry-javaagent - version: 2.11.0-adot-lambda1-aws ``` This change has the Lambda layer's Otel agent consume the version provided in version.gradle.kts. For example, after building locally, the layer will now log: ``` INFO io.opentelemetry.javaagent.tooling.VersionLogger - opentelemetry-javaagent - version: 2.11.1-dev0-adot-lambda1-aws ``` which is the accurate version. Soon we will add pre-release and post-release workflows like the other language ADOT repos to automatically bump the version number. This is part of our work to align the release process for all 4 languages and streamline releases for future engineers. By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- build.gradle.kts | 4 +++- lambda-layer/build-layer.sh | 17 ++++++++++++----- lambda-layer/build.gradle.kts | 3 ++- version.gradle.kts | 24 ++++++++++++++++++++++++ 4 files changed, 41 insertions(+), 7 deletions(-) create mode 100644 version.gradle.kts diff --git a/build.gradle.kts b/build.gradle.kts index 843124bd80..6fdbd31a30 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -37,6 +37,8 @@ nebulaRelease { addReleaseBranchPattern("""v\d+\.\d+\.x""") } +apply(from = "version.gradle.kts") + nexusPublishing { repositories { sonatype { @@ -71,7 +73,7 @@ allprojects { ktlint("1.4.0").editorConfigOverride(mapOf("indent_size" to "2", "continuation_indent_size" to "2")) // Doesn't support pluginManagement block - targetExclude("settings.gradle.kts") + targetExclude("settings.gradle.kts", "version.gradle.kts") if (!project.path.startsWith(":sample-apps:")) { licenseHeaderFile("${rootProject.projectDir}/config/license/header.java", "plugins|include|import") diff --git a/lambda-layer/build-layer.sh b/lambda-layer/build-layer.sh index ddd144716d..791ad59152 100755 --- a/lambda-layer/build-layer.sh +++ b/lambda-layer/build-layer.sh @@ -4,13 +4,20 @@ set -e SOURCEDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +## Get ADOT version +echo "Info: Getting ADOT Version" +pushd "$SOURCEDIR"/.. +version=$(./gradlew -q printVersion) +echo "Found ADOT Version: ${version}" +popd + ## Get OTel version echo "Info: Getting OTEL Version" file="$SOURCEDIR/../.github/patches/versions" -version=$(awk -F'=v' '/OTEL_JAVA_INSTRUMENTATION_VERSION/ {print $2}' "$file") -echo "Found OTEL Version: ${version}" +otel_instrumentation_version=$(awk -F'=v' '/OTEL_JAVA_INSTRUMENTATION_VERSION/ {print $2}' "$file") +echo "Found OTEL Version: ${otel_instrumentation_version}" # Exit if the version is empty or null -if [[ -z "$version" ]]; then +if [[ -z "$otel_instrumentation_version" ]]; then echo "Error: Version could not be found in ${file}." 
exit 1 fi @@ -20,7 +27,7 @@ fi echo "Info: Cloning and Patching OpenTelemetry Java Instrumentation Repository" git clone https://github.com/open-telemetry/opentelemetry-java-instrumentation.git pushd opentelemetry-java-instrumentation -git checkout v${version} -b tag-v${version} +git checkout v${otel_instrumentation_version} -b tag-v${otel_instrumentation_version} # This patch is for Lambda related context propagation patch -p1 < "$SOURCEDIR"/patches/opentelemetry-java-instrumentation.patch @@ -56,7 +63,7 @@ popd ## Build ADOT Lambda Java SDK Layer Code echo "Info: Building ADOT Lambda Java SDK Layer Code" -./gradlew build -PotelVersion=${version} +./gradlew build -PotelVersion=${otel_instrumentation_version} -Pversion=${version} ## Copy ADOT Java Agent downloaded using Gradle task and bundle it with the Lambda handler script diff --git a/lambda-layer/build.gradle.kts b/lambda-layer/build.gradle.kts index 059294cfec..ade2a438d3 100644 --- a/lambda-layer/build.gradle.kts +++ b/lambda-layer/build.gradle.kts @@ -27,6 +27,7 @@ val javaagentDependency by configurations.creating { extendsFrom() } +val version: String by project val otelVersion: String by project dependencies { @@ -35,7 +36,7 @@ dependencies { // Already included in wrapper so compileOnly compileOnly("io.opentelemetry:opentelemetry-sdk-extension-autoconfigure-spi") compileOnly("io.opentelemetry:opentelemetry-sdk-extension-aws") - javaagentDependency("software.amazon.opentelemetry:aws-opentelemetry-agent:$otelVersion-adot-lambda1") + javaagentDependency("software.amazon.opentelemetry:aws-opentelemetry-agent:$version-adot-lambda1") } tasks.register("download") { diff --git a/version.gradle.kts b/version.gradle.kts new file mode 100644 index 0000000000..586fdc18f3 --- /dev/null +++ b/version.gradle.kts @@ -0,0 +1,24 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. + * + * Licensed under the Apache License, Version 2.0 (the "License"). 
+ * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +val adotVersion = "2.18.0-dev0" + +allprojects { + version = if (project.hasProperty("release.version")) { + project.property("release.version") as String + } else { + adotVersion + } +} From e3e80cd66698af1a75fc97e6e464ac0bf8dd18d5 Mon Sep 17 00:00:00 2001 From: Eric Zhang Date: Fri, 5 Sep 2025 14:13:21 -0700 Subject: [PATCH 39/83] Add pre-release and post-release workflows (#1123) *Issue #, if available:* *Description of changes:* Added pre-release and post-release workflows to Java. These workflows replace the manual effort needed to create pre-release and post-release branches and update the project version during the release process. Successful run in test branch: https://github.com/ezhang6811/aws-otel-java-instrumentation/actions/runs/16606979278 https://github.com/ezhang6811/aws-otel-java-instrumentation/pull/9 By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .../workflows/{owasp.yml => daily-scan.yml} | 0 .../workflows/post-release-version-bump.yml | 120 ++++++++++++++++++ .github/workflows/pre-release-prepare.yml | 106 ++++++++++++++++ 3 files changed, 226 insertions(+) rename .github/workflows/{owasp.yml => daily-scan.yml} (100%) create mode 100644 .github/workflows/post-release-version-bump.yml create mode 100644 .github/workflows/pre-release-prepare.yml diff --git a/.github/workflows/owasp.yml b/.github/workflows/daily-scan.yml similarity index 100% rename from .github/workflows/owasp.yml rename to .github/workflows/daily-scan.yml diff --git a/.github/workflows/post-release-version-bump.yml b/.github/workflows/post-release-version-bump.yml new file mode 100644 index 0000000000..7f619bbbe9 --- /dev/null +++ b/.github/workflows/post-release-version-bump.yml @@ -0,0 +1,120 @@ +name: Post Release - Prepare Main for Next Development Cycle + +on: + workflow_dispatch: + inputs: + version: + description: 'Version number (e.g., 1.0.1)' + required: true + +env: + AWS_DEFAULT_REGION: us-east-1 + +permissions: + id-token: write + contents: write + pull-requests: write + +jobs: + check-version: + runs-on: ubuntu-latest + steps: + - name: Checkout main + uses: actions/checkout@v2 + with: + ref: main + fetch-depth: 0 + + - name: Extract Major.Minor Version and setup Env variable + run: | + echo "VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV + echo "MAJOR_MINOR=$(echo ${{ github.event.inputs.version }} | sed -E 's/([0-9]+\.[0-9]+)\.[0-9]+/\1/')" >> $GITHUB_ENV + + - name: Get current major.minor version from main branch + id: get_version + run: | + CURRENT_VERSION=$(grep '__version__' aws-opentelemetry-distro/src/amazon/opentelemetry/distro/version.py | sed -E 's/__version__ = "([0-9]+\.[0-9]+)\.[0-9]+.*"/\1/') + echo "CURRENT_MAJOR_MINOR_VERSION=$CURRENT_VERSION" >> $GITHUB_ENV + + - name: Set major and minor for current version + run: | + echo "CURRENT_MAJOR=$(echo $CURRENT_MAJOR_MINOR_VERSION | cut -d. 
-f1)" >> $GITHUB_ENV + echo "CURRENT_MINOR=$(echo $CURRENT_MAJOR_MINOR_VERSION | cut -d. -f2)" >> $GITHUB_ENV + + - name: Set major and minor for input version + run: | + echo "INPUT_MAJOR=$(echo $MAJOR_MINOR | cut -d. -f1)" >> $GITHUB_ENV + echo "INPUT_MINOR=$(echo $MAJOR_MINOR | cut -d. -f2)" >> $GITHUB_ENV + + - name: Compare major.minor version and skip if behind + run: | + if [ "$CURRENT_MAJOR" -gt "$INPUT_MAJOR" ] || { [ "$CURRENT_MAJOR" -eq "$INPUT_MAJOR" ] && [ "$CURRENT_MINOR" -gt "$INPUT_MINOR" ]; }; then + echo "Input version is behind main's current major.minor version, don't need to update major version" + exit 1 + fi + + + prepare-main: + runs-on: ubuntu-latest + needs: check-version + steps: + - name: Configure AWS credentials for BOT secrets + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ secrets.AWS_ROLE_ARN_SECRETS_MANAGER }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} + + - name: Get Bot secrets + uses: aws-actions/aws-secretsmanager-get-secrets@v1 + id: bot_secrets + with: + secret-ids: | + BOT_TOKEN ,${{ secrets.BOT_TOKEN_SECRET_ARN }} + parse-json-secrets: true + + - name: Setup Git + uses: actions/checkout@v2 + with: + fetch-depth: 0 + token: ${{ env.BOT_TOKEN_GITHUB_RW_PATOKEN }} + + - name: Configure Git + run: | + git config user.name "github-actions" + git config user.email "github-actions@github.com" + + - name: Extract Major.Minor Version and setup Env variable + run: | + echo "VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV + echo "MAJOR_MINOR=$(echo ${{ github.event.inputs.version }} | sed -E 's/([0-9]+\.[0-9]+)\.[0-9]+/\1/')" >> $GITHUB_ENV + + - name: Determine release branch and checkout + run: | + RELEASE_BRANCH="release/v${MAJOR_MINOR}.x" + git fetch origin $RELEASE_BRANCH + git checkout -b "prepare-main-for-next-dev-cycle-${VERSION}" origin/$RELEASE_BRANCH + + - name: Update version to next development version in main + run: | + DEV_VERSION="${{ github.event.inputs.version }}.dev0" + sed 
-i'' -e "s/val adotVersion = \".*\"/val adotVersion = \"${DEV_VERSION}\"/" version.gradle.kts + VERSION="${{ github.event.inputs.version }}" + sed -i'' -e 's/adot-autoinstrumentation-java:v2.*"/adot-autoinstrumentation-java:v'$VERSION'"/' .github/workflows/daily-scan.yml + git add version.gradle.kts + git add .github/workflows/daily-scan.yml + git commit -m "Prepare main for next development cycle: Update version to $DEV_VERSION" + git push --set-upstream origin "prepare-main-for-next-dev-cycle-${VERSION}" + + - name: Create Pull Request to main + env: + GITHUB_TOKEN: ${{ env.BOT_TOKEN_GITHUB_RW_PATOKEN }} + run: | + DEV_VERSION="${{ github.event.inputs.version }}.dev0" + gh pr create --title "Post release $VERSION: Update version to $DEV_VERSION" \ + --body "This PR prepares the main branch for the next development cycle by updating the version to $DEV_VERSION and updating the image version to be scanned to the latest released. + + This PR should only be merge when release for version v$VERSION is success. + + By submitting this pull request, I confirm that you can use, modify, copy, and redistribute this contribution, under the terms of your choice." \ + --head prepare-main-for-next-dev-cycle-${VERSION} \ + --base main \ No newline at end of file diff --git a/.github/workflows/pre-release-prepare.yml b/.github/workflows/pre-release-prepare.yml new file mode 100644 index 0000000000..9e4c634467 --- /dev/null +++ b/.github/workflows/pre-release-prepare.yml @@ -0,0 +1,106 @@ +name: Pre Release Prepare - Update Version and Create PR + +on: + workflow_dispatch: + inputs: + version: + description: 'Version number (e.g., 1.0.1)' + required: true + is_patch: + description: 'Is this a patch? 
(true or false)' + required: true + default: 'false' + +env: + AWS_DEFAULT_REGION: us-east-1 + +permissions: + contents: write + pull-requests: write + id-token: write + + +jobs: + update-version-and-create-pr: + runs-on: ubuntu-latest + steps: + - name: Configure AWS credentials for BOT secrets + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ secrets.AWS_ROLE_ARN_SECRETS_MANAGER }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} + + - name: Get Bot secrets + uses: aws-actions/aws-secretsmanager-get-secrets@v1 + id: bot_secrets + with: + secret-ids: | + BOT_TOKEN ,${{ secrets.BOT_TOKEN_SECRET_ARN }} + parse-json-secrets: true + + - name: Checkout main branch + uses: actions/checkout@v3 + with: + ref: 'main' + token: ${{ env.BOT_TOKEN_GITHUB_RW_PATOKEN }} + + - name: Setup Git + run: | + git config user.name "github-actions" + git config user.email "github-actions@github.com" + + - name: Extract Major.Minor Version and setup Env variable + run: | + echo "VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV + echo "MAJOR_MINOR=$(echo ${{ github.event.inputs.version }} | sed -E 's/([0-9]+\.[0-9]+)\.[0-9]+/\1/')" >> $GITHUB_ENV + + - name: Create branches + run: | + IS_PATCH=${{ github.event.inputs.is_patch }} + if [[ "$IS_PATCH" != "true" && "$IS_PATCH" != "false" ]]; then + echo "Invalid input for IS_PATCH. Must be 'true' or 'false'." + exit 1 + fi + + + if git ls-remote --heads origin release/v${MAJOR_MINOR}.x | grep -q "release/v${MAJOR_MINOR}.x"; then + if [ "$IS_PATCH" = "true" ]; then + git fetch origin release/v${MAJOR_MINOR}.x + echo "Branch release/v${MAJOR_MINOR}.x already exists, checking out." 
+ git checkout "release/v${MAJOR_MINOR}.x" + else + echo "Error, release series branch release/v${MAJOR_MINOR}.x exist for non-patch release" + echo "Check your input or branch" + exit 1 + fi + else + if [ "$IS_PATCH" = "true" ]; then + echo "Error, release series branch release/v${MAJOR_MINOR}.x NOT exist for patch release" + echo "Check your input or branch" + exit 1 + else + echo "Creating branch release/v${MAJOR_MINOR}.x." + git checkout -b "release/v${MAJOR_MINOR}.x" + git push origin "release/v${MAJOR_MINOR}.x" + fi + fi + + git checkout -b "v${VERSION}_release" + git push origin "v${VERSION}_release" + + - name: Update version in file + run: | + sed -i'' -e "s/val adotVersion = \".*\"/val adotVersion = \"${VERSION}\"/" version.gradle.kts + git commit -am "Update version to ${VERSION}" + git push origin "v${VERSION}_release" + + - name: Create pull request against the release branch + env: + GITHUB_TOKEN: ${{ env.BOT_TOKEN_GITHUB_RW_PATOKEN }} + run: | + gh pr create --title "Pre-release: Update version to ${VERSION}" \ + --body "This PR updates the version to ${VERSION}. + + By submitting this pull request, I confirm that you can use, modify, copy, and redistribute this contribution, under the terms of your choice." \ + --head v${{ github.event.inputs.version }}_release \ + --base release/v${MAJOR_MINOR}.x \ No newline at end of file From 9b5c6406e1e11a483b6d67e77795894ad9d5ba37 Mon Sep 17 00:00:00 2001 From: Eric Zhang Date: Fri, 5 Sep 2025 15:22:42 -0700 Subject: [PATCH 40/83] Add main build validation for release workflow (#1125) *Issue #, if available:* *Description of changes:* This PR modifies the release build workflow to wait for the main build workflow in the same branch to complete successfully. before proceeding with the release. see [Python PR](https://github.com/aws-observability/aws-otel-python-instrumentation/pull/443) for more details and testing. 
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- .github/workflows/main-build.yml | 1 + .github/workflows/release-build.yml | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/.github/workflows/main-build.yml b/.github/workflows/main-build.yml index 26f3ba2067..a5136263a5 100644 --- a/.github/workflows/main-build.yml +++ b/.github/workflows/main-build.yml @@ -256,6 +256,7 @@ jobs: aws s3 cp ./build/distributions/aws-opentelemetry-java-layer.zip s3://adot-main-build-staging-jar/adot-java-lambda-layer-${{ github.run_id }}.zip application-signals-e2e-test: + name: "Application Signals E2E Test" needs: [build, application-signals-lambda-layer-build] uses: ./.github/workflows/application-signals-e2e-test.yml secrets: inherit diff --git a/.github/workflows/release-build.yml b/.github/workflows/release-build.yml index 41862f5c08..457320be8b 100644 --- a/.github/workflows/release-build.yml +++ b/.github/workflows/release-build.yml @@ -25,6 +25,24 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 + + - name: Check main build status + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + WORKFLOW_ID=$(gh api repos/${{ github.repository }}/actions/workflows --jq '.workflows[] | select(.name=="Java Agent Main Build") | .id') + LATEST_RUN=$(gh api repos/${{ github.repository }}/actions/workflows/$WORKFLOW_ID/runs --jq '[.workflow_runs[] | select(.head_branch=="${{ github.ref_name }}")] | sort_by(.created_at) | .[-1] | {conclusion, status}') + STATUS=$(echo "$LATEST_RUN" | jq -r '.status') + CONCLUSION=$(echo "$LATEST_RUN" | jq -r '.conclusion') + + if [ "$STATUS" = "in_progress" ] || [ "$STATUS" = "queued" ]; then + echo "Main build is still running (status: $STATUS). Cannot proceed with release." 
+ exit 1 + elif [ "$CONCLUSION" != "success" ]; then + echo "Latest main build on branch ${{ github.ref_name }} conclusion: $CONCLUSION" + exit 1 + fi + echo "Main build succeeded, proceeding with release" - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 with: java-version-file: .java-version From 53acc0180a8d20da9345abc025e302222dc8a711 Mon Sep 17 00:00:00 2001 From: Jonathan Lee <107072447+jj22ee@users.noreply.github.com> Date: Sat, 6 Sep 2025 20:20:36 -0700 Subject: [PATCH 41/83] fix: UDP Exporter Sample App not starting via Gradle (#1171) *Issue #, if available:* - Sample app for UDP Exporter is failing via `gradle build`: https://github.com/aws-observability/aws-otel-java-instrumentation/actions/runs/17167485805/job/48710900750 *Description of changes:* - UDP e2e test uses `gradle` from GH Action env, which isn't stable. Use `gradlew` that exists in this repo instead. By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .github/workflows/udp-exporter-e2e-test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/udp-exporter-e2e-test.yml b/.github/workflows/udp-exporter-e2e-test.yml index a3e4631db8..acf7e1800a 100644 --- a/.github/workflows/udp-exporter-e2e-test.yml +++ b/.github/workflows/udp-exporter-e2e-test.yml @@ -51,8 +51,8 @@ jobs: run: | export XRAY_UDP_SPAN_EXPORTER_VERSION=${{ steps.build-udp-exporter.outputs.xrayUdpSpanExporterVersion }} echo "Running Sample App against X-Ray UDP Span Exporter version: $XRAY_UDP_SPAN_EXPORTER_VERSION" - gradle build - gradle bootRun & + ../../gradlew build + ../../gradlew bootRun & sleep 5 - name: Call Sample App Endpoint From 085a9dd6d2c3defefca784aa99b7f60dc631f326 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Sep 2025 18:21:06 +0000 Subject: [PATCH 42/83] Bump actions/setup-java from 4.7.1 to 5.0.0 (#1167) --- .github/workflows/codeql-analysis.yml | 2 +- .github/workflows/daily-scan.yml | 2 +- .../docker-build-smoke-tests-fake-backend.yml | 2 +- .github/workflows/e2e-tests-app-with-java-agent.yml | 8 ++++---- .github/workflows/e2e-tests-with-operator.yml | 4 ++-- .github/workflows/main-build.yml | 8 ++++---- .github/workflows/nightly-upstream-snapshot-build.yml | 4 ++-- .github/workflows/patch-release-build.yml | 2 +- .github/workflows/pr-build.yml | 10 +++++----- .github/workflows/release-build.yml | 2 +- .github/workflows/release-lambda.yml | 2 +- .github/workflows/release-udp-exporter.yml | 2 +- .github/workflows/udp-exporter-e2e-test.yml | 2 +- 13 files changed, 25 insertions(+), 25 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index c75f707d0a..7c955f71e2 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -31,7 +31,7 @@ jobs: with: languages: java - - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 
# v4.7.1 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: temurin diff --git a/.github/workflows/daily-scan.yml b/.github/workflows/daily-scan.yml index 7a85ff57d9..3b86480666 100644 --- a/.github/workflows/daily-scan.yml +++ b/.github/workflows/daily-scan.yml @@ -31,7 +31,7 @@ jobs: fetch-depth: 0 - name: Set up Java for dependency scan - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: 'temurin' diff --git a/.github/workflows/docker-build-smoke-tests-fake-backend.yml b/.github/workflows/docker-build-smoke-tests-fake-backend.yml index 3ad1d3f6de..5564e57dfc 100644 --- a/.github/workflows/docker-build-smoke-tests-fake-backend.yml +++ b/.github/workflows/docker-build-smoke-tests-fake-backend.yml @@ -21,7 +21,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: 'temurin' diff --git a/.github/workflows/e2e-tests-app-with-java-agent.yml b/.github/workflows/e2e-tests-app-with-java-agent.yml index df1f06307a..b8b61d0eb0 100644 --- a/.github/workflows/e2e-tests-app-with-java-agent.yml +++ b/.github/workflows/e2e-tests-app-with-java-agent.yml @@ -29,7 +29,7 @@ jobs: with: fetch-depth: 0 - - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: temurin @@ -81,7 +81,7 @@ jobs: steps: - uses: actions/checkout@v5 - - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 
with: java-version-file: .java-version distribution: 'temurin' @@ -112,7 +112,7 @@ jobs: steps: - uses: actions/checkout@v5 - - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: 'temurin' @@ -143,7 +143,7 @@ jobs: steps: - uses: actions/checkout@v5 - - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: 'temurin' diff --git a/.github/workflows/e2e-tests-with-operator.yml b/.github/workflows/e2e-tests-with-operator.yml index 92dfbee4f9..3fce94edb7 100644 --- a/.github/workflows/e2e-tests-with-operator.yml +++ b/.github/workflows/e2e-tests-with-operator.yml @@ -38,7 +38,7 @@ jobs: with: fetch-depth: 0 - - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: temurin @@ -132,7 +132,7 @@ jobs: path: aws-otel-java-instrumentation - name: Set up JDK 11 - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: distribution: 'zulu' java-version: '11' diff --git a/.github/workflows/main-build.yml b/.github/workflows/main-build.yml index a5136263a5..76cbc58b6a 100644 --- a/.github/workflows/main-build.yml +++ b/.github/workflows/main-build.yml @@ -23,7 +23,7 @@ jobs: runs-on: aws-otel-java-instrumentation_ubuntu-latest_32-core steps: - uses: actions/checkout@v5 - - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: temurin @@ -57,7 +57,7 @@ jobs: - uses: 
actions/checkout@v5 with: fetch-depth: 0 - - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: temurin @@ -192,7 +192,7 @@ jobs: - uses: actions/checkout@v5 with: fetch-depth: 0 - - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version: 23 distribution: 'temurin' @@ -232,7 +232,7 @@ jobs: - uses: actions/checkout@v5 with: fetch-depth: 0 - - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: 'temurin' diff --git a/.github/workflows/nightly-upstream-snapshot-build.yml b/.github/workflows/nightly-upstream-snapshot-build.yml index 7be913ca15..7b9370224d 100644 --- a/.github/workflows/nightly-upstream-snapshot-build.yml +++ b/.github/workflows/nightly-upstream-snapshot-build.yml @@ -27,7 +27,7 @@ jobs: with: fetch-depth: 0 - - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: 'temurin' @@ -132,7 +132,7 @@ jobs: - uses: actions/checkout@v5 with: fetch-depth: 0 - - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version: 23 distribution: 'temurin' diff --git a/.github/workflows/patch-release-build.yml b/.github/workflows/patch-release-build.yml index 4da82f9a43..66c066ca9e 100644 --- a/.github/workflows/patch-release-build.yml +++ b/.github/workflows/patch-release-build.yml @@ -61,7 +61,7 @@ jobs: with: ref: ${{ needs.prepare-release-branch.outputs.release-branch-name }} - - 
uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: 'temurin' diff --git a/.github/workflows/pr-build.yml b/.github/workflows/pr-build.yml index 87ecee6357..389d6140cd 100644 --- a/.github/workflows/pr-build.yml +++ b/.github/workflows/pr-build.yml @@ -14,7 +14,7 @@ jobs: steps: - uses: actions/checkout@v5 - - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: temurin @@ -56,7 +56,7 @@ jobs: steps: - uses: actions/checkout@v5 - - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: temurin @@ -93,7 +93,7 @@ jobs: ./gradlew build -p exporters/aws-distro-opentelemetry-xray-udp-span-exporter - name: Set up Java version for tests - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version: 23 distribution: temurin @@ -109,7 +109,7 @@ jobs: arguments: contractTests -PlocalDocker=true -i - name: Set up Java version for image build - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: temurin @@ -168,7 +168,7 @@ jobs: uses: actions/checkout@v5 - name: Setup Java - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: temurin diff --git a/.github/workflows/release-build.yml 
b/.github/workflows/release-build.yml index 457320be8b..1df004b1fc 100644 --- a/.github/workflows/release-build.yml +++ b/.github/workflows/release-build.yml @@ -43,7 +43,7 @@ jobs: exit 1 fi echo "Main build succeeded, proceeding with release" - - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: 'temurin' diff --git a/.github/workflows/release-lambda.yml b/.github/workflows/release-lambda.yml index fbd12eb653..a171012d76 100644 --- a/.github/workflows/release-lambda.yml +++ b/.github/workflows/release-lambda.yml @@ -42,7 +42,7 @@ jobs: - name: Checkout Repo @ SHA - ${{ github.sha }} uses: actions/checkout@v5 - - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: 'temurin' diff --git a/.github/workflows/release-udp-exporter.yml b/.github/workflows/release-udp-exporter.yml index c72b8f7a77..398a34a604 100644 --- a/.github/workflows/release-udp-exporter.yml +++ b/.github/workflows/release-udp-exporter.yml @@ -29,7 +29,7 @@ jobs: uses: actions/checkout@v5 - name: Set up Java - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: 'temurin' diff --git a/.github/workflows/udp-exporter-e2e-test.yml b/.github/workflows/udp-exporter-e2e-test.yml index acf7e1800a..ec227e689a 100644 --- a/.github/workflows/udp-exporter-e2e-test.yml +++ b/.github/workflows/udp-exporter-e2e-test.yml @@ -16,7 +16,7 @@ jobs: uses: actions/checkout@v5 - name: Set up Java - uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 + uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: 
java-version-file: .java-version distribution: 'temurin' From 5fe1931af1cc7b86166599a4a5d7f363a14f64dd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Sep 2025 19:03:21 +0000 Subject: [PATCH 43/83] Bump tempfile from 3.20.0 to 3.21.0 in /tools/cp-utility (#1168) --- tools/cp-utility/Cargo.lock | 4 ++-- tools/cp-utility/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tools/cp-utility/Cargo.lock b/tools/cp-utility/Cargo.lock index 711702b632..874a5f403e 100644 --- a/tools/cp-utility/Cargo.lock +++ b/tools/cp-utility/Cargo.lock @@ -182,9 +182,9 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.20.0" +version = "3.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" +checksum = "15b61f8f20e3a6f7e0649d825294eaf317edce30f82cf6026e7e4cb9222a7d1e" dependencies = [ "fastrand", "getrandom", diff --git a/tools/cp-utility/Cargo.toml b/tools/cp-utility/Cargo.toml index 7ff1c7586b..a758361078 100644 --- a/tools/cp-utility/Cargo.toml +++ b/tools/cp-utility/Cargo.toml @@ -10,7 +10,7 @@ edition = "2021" [dev-dependencies] # dependencies only used during tests -tempfile = "3.20.0" +tempfile = "3.21.0" uuid = { version = "1.18.0", features = ["v4", "fast-rng"] } [profile.release] From fa548e21dce11fba674d15dc8d2e50ba88168427 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Sep 2025 20:51:40 +0000 Subject: [PATCH 44/83] Bump org.apache.logging.log4j:log4j-core from 2.22.1 to 2.25.1 (#1166) --- instrumentation/log4j-2.13.2/build.gradle.kts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/instrumentation/log4j-2.13.2/build.gradle.kts b/instrumentation/log4j-2.13.2/build.gradle.kts index 529f9953c3..027ded24a8 100644 --- a/instrumentation/log4j-2.13.2/build.gradle.kts +++ 
b/instrumentation/log4j-2.13.2/build.gradle.kts @@ -25,5 +25,5 @@ dependencies { compileOnly("io.opentelemetry.javaagent:opentelemetry-javaagent-extension-api") compileOnly("net.bytebuddy:byte-buddy") - compileOnly("org.apache.logging.log4j:log4j-core:2.22.1") + compileOnly("org.apache.logging.log4j:log4j-core:2.25.1") } From 91d43ac0290ecb1334600525281a8be9203d2b09 Mon Sep 17 00:00:00 2001 From: Mahad Janjua <134644284+majanjua-amzn@users.noreply.github.com> Date: Tue, 9 Sep 2025 08:07:19 -0700 Subject: [PATCH 45/83] [Lambda] Remove b3/b3multi propagators (#1174) --- lambda-layer/otel-instrument | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lambda-layer/otel-instrument b/lambda-layer/otel-instrument index 8bf5cf4657..7fd2366d1e 100644 --- a/lambda-layer/otel-instrument +++ b/lambda-layer/otel-instrument @@ -2,7 +2,7 @@ export OTEL_INSTRUMENTATION_AWS_SDK_EXPERIMENTAL_SPAN_ATTRIBUTES=true -export OTEL_PROPAGATORS="${OTEL_PROPAGATORS:-baggage,xray,tracecontext,b3,b3multi}" +export OTEL_PROPAGATORS="${OTEL_PROPAGATORS:-baggage,xray,tracecontext}" export OTEL_SERVICE_NAME=${OTEL_SERVICE_NAME:-${AWS_LAMBDA_FUNCTION_NAME}} From 758474aec4518d73560485b769de9add336adcb9 Mon Sep 17 00:00:00 2001 From: Jonathan Lee <107072447+jj22ee@users.noreply.github.com> Date: Tue, 9 Sep 2025 16:18:37 -0700 Subject: [PATCH 46/83] Update Kotlin version in UDP Exporter Sample Test App (#1184) *Issue #, if available:* - Fix failing workflow `udp-exporter-e2e-test`, where the sample app is unable to be built - https://github.com/aws-observability/aws-otel-java-instrumentation/actions/runs/17587060722/job/49957841960 ``` FAILURE: Build failed with an exception. * What went wrong: Could not determine the dependencies of task ':compileKotlin'. > Unknown Kotlin JVM target: 21 ``` *Description of changes:* - Update Kotlin version in UDP Exporter Sample Test App. It is also now aligned with the Kotlin version used in other `build.gradle.kts` files in this repo. 
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- sample-apps/udp-exporter-test-app/build.gradle.kts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sample-apps/udp-exporter-test-app/build.gradle.kts b/sample-apps/udp-exporter-test-app/build.gradle.kts index fdce4008df..a24f883540 100644 --- a/sample-apps/udp-exporter-test-app/build.gradle.kts +++ b/sample-apps/udp-exporter-test-app/build.gradle.kts @@ -1,6 +1,6 @@ plugins { id("java") - kotlin("jvm") version "1.9.0" + kotlin("jvm") version "2.1.0-RC2" id("io.spring.dependency-management") version "1.1.0" id("org.springframework.boot") version "2.7.17" } From f68552532ab8f583fd8f22efcf481f49a52908fa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 Sep 2025 15:49:53 +0000 Subject: [PATCH 47/83] Bump actions/setup-go from 5 to 6 (#1182) --- .github/workflows/e2e-tests-with-operator.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/e2e-tests-with-operator.yml b/.github/workflows/e2e-tests-with-operator.yml index 3fce94edb7..5ff4473302 100644 --- a/.github/workflows/e2e-tests-with-operator.yml +++ b/.github/workflows/e2e-tests-with-operator.yml @@ -97,7 +97,7 @@ jobs: path: aws-otel-java-instrumentation - name: Set up Go 1.x - uses: actions/setup-go@v5 + uses: actions/setup-go@v6 with: go-version: '~1.18.9' From b75aaa72efe90e425daadc9f01e4948a1856f97c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 Sep 2025 09:42:09 -0700 Subject: [PATCH 48/83] Bump tempfile from 3.21.0 to 3.22.0 in /tools/cp-utility (#1190) Bumps [tempfile](https://github.com/Stebalien/tempfile) from 3.21.0 to 3.22.0.

Changelog

Sourced from tempfile's changelog.

3.22.0

  • Updated windows-sys requirement to allow version 0.61.x
  • Remove unstable-windows-keep-open-tempfile feature.
Commits
  • f720dbe chore: release 3.22.0
  • 55d742c chore: remove deprecated unstable feature flag
  • bc41a0b build(deps): update windows-sys requirement from >=0.52, <0.61 to >=0.52, <0....
  • 3c55387 test: make sure we don't drop tempdirs early (#373)
  • 17bf644 doc(builder): clarify permissions (#372)
  • c7423f1 doc(env): document the alternative to setting the tempdir (#371)
  • 5af60ca test(wasi): run a few tests that shouldn't have been disabled (#370)
  • 6c0c561 fix(doc): temp_dir doesn't check if writable
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=tempfile&package-manager=cargo&previous-version=3.21.0&new-version=3.22.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- tools/cp-utility/Cargo.lock | 4 ++-- tools/cp-utility/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tools/cp-utility/Cargo.lock b/tools/cp-utility/Cargo.lock index 874a5f403e..be6df5c070 100644 --- a/tools/cp-utility/Cargo.lock +++ b/tools/cp-utility/Cargo.lock @@ -182,9 +182,9 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.21.0" +version = "3.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15b61f8f20e3a6f7e0649d825294eaf317edce30f82cf6026e7e4cb9222a7d1e" +checksum = "84fa4d11fadde498443cca10fd3ac23c951f0dc59e080e9f4b93d4df4e4eea53" dependencies = [ "fastrand", "getrandom", diff --git a/tools/cp-utility/Cargo.toml b/tools/cp-utility/Cargo.toml index a758361078..8f04b613b6 100644 --- a/tools/cp-utility/Cargo.toml +++ b/tools/cp-utility/Cargo.toml @@ -10,7 +10,7 @@ edition = "2021" [dev-dependencies] # dependencies only used during tests -tempfile = "3.21.0" +tempfile = "3.22.0" uuid = { version = "1.18.0", features = ["v4", "fast-rng"] } [profile.release] From 8517061060e6d00cd444b6fdcac60e4b457476b5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 16 Sep 2025 17:25:07 +0000 Subject: [PATCH 49/83] Bump gradle/actions from 4.4.2 to 4.4.3 (#1189) --- .github/workflows/codeql-analysis.yml | 2 +- .github/workflows/docker-build-corretto-slim.yml | 2 +- .github/workflows/docker-build-smoke-tests-fake-backend.yml | 2 +- .github/workflows/e2e-tests-app-with-java-agent.yml | 2 +- .github/workflows/main-build.yml | 6 +++--- .github/workflows/nightly-upstream-snapshot-build.yml | 4 ++-- .github/workflows/pr-build.yml | 4 ++-- .github/workflows/release-build.yml | 2 +- 8 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 
7c955f71e2..ab3dd2e5cb 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -50,7 +50,7 @@ jobs: gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} gpg_password: ${{ secrets.GPG_PASSPHRASE }} - - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Manually build to avoid autobuild failures uses: gradle/gradle-build-action@v3 diff --git a/.github/workflows/docker-build-corretto-slim.yml b/.github/workflows/docker-build-corretto-slim.yml index ed461109ee..d883c4a9de 100644 --- a/.github/workflows/docker-build-corretto-slim.yml +++ b/.github/workflows/docker-build-corretto-slim.yml @@ -20,7 +20,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 with: diff --git a/.github/workflows/docker-build-smoke-tests-fake-backend.yml b/.github/workflows/docker-build-smoke-tests-fake-backend.yml index 5564e57dfc..7d774e4f11 100644 --- a/.github/workflows/docker-build-smoke-tests-fake-backend.yml +++ b/.github/workflows/docker-build-smoke-tests-fake-backend.yml @@ -38,7 +38,7 @@ jobs: with: gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} gpg_password: ${{ secrets.GPG_PASSPHRASE }} - - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 with: diff --git a/.github/workflows/e2e-tests-app-with-java-agent.yml b/.github/workflows/e2e-tests-app-with-java-agent.yml index b8b61d0eb0..d4f290ccd5 100644 --- 
a/.github/workflows/e2e-tests-app-with-java-agent.yml +++ b/.github/workflows/e2e-tests-app-with-java-agent.yml @@ -51,7 +51,7 @@ jobs: gpg_password: ${{ secrets.GPG_PASSPHRASE }} - name: Validate the checksums of Gradle Wrapper - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 + uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 diff --git a/.github/workflows/main-build.yml b/.github/workflows/main-build.yml index 76cbc58b6a..b8db13f7c7 100644 --- a/.github/workflows/main-build.yml +++ b/.github/workflows/main-build.yml @@ -39,7 +39,7 @@ jobs: with: path: ~/.pnpm-store key: ${{ runner.os }}-test-cache-pnpm-modules - - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - uses: ./.github/actions/patch-dependencies with: run_tests: "true" @@ -76,7 +76,7 @@ jobs: gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} gpg_password: ${{ secrets.GPG_PASSPHRASE }} - - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 @@ -196,7 +196,7 @@ jobs: with: java-version: 23 distribution: 'temurin' - - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 diff --git a/.github/workflows/nightly-upstream-snapshot-build.yml b/.github/workflows/nightly-upstream-snapshot-build.yml index 7b9370224d..dc3c66ad45 100644 --- a/.github/workflows/nightly-upstream-snapshot-build.yml +++ 
b/.github/workflows/nightly-upstream-snapshot-build.yml @@ -46,7 +46,7 @@ jobs: gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} gpg_password: ${{ secrets.GPG_PASSPHRASE }} - - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 @@ -136,7 +136,7 @@ jobs: with: java-version: 23 distribution: 'temurin' - - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials uses: aws-actions/configure-aws-credentials@v4 diff --git a/.github/workflows/pr-build.yml b/.github/workflows/pr-build.yml index 389d6140cd..5bd4eba669 100644 --- a/.github/workflows/pr-build.yml +++ b/.github/workflows/pr-build.yml @@ -31,7 +31,7 @@ jobs: path: ~/.pnpm-store key: ${{ runner.os }}-test-cache-pnpm-modules - - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - uses: ./.github/actions/patch-dependencies with: @@ -61,7 +61,7 @@ jobs: java-version-file: .java-version distribution: temurin - - uses: gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 # Cleanup directories before proceeding with setup - name: Clean up old installations diff --git a/.github/workflows/release-build.yml b/.github/workflows/release-build.yml index 1df004b1fc..6a95f7b2df 100644 --- a/.github/workflows/release-build.yml +++ b/.github/workflows/release-build.yml @@ -47,7 +47,7 @@ jobs: with: java-version-file: .java-version distribution: 'temurin' - - uses: 
gradle/actions/wrapper-validation@017a9effdb900e5b5b2fddfb590a105619dca3c3 # v4.4.2 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Publish patched dependencies to maven local uses: ./.github/actions/patch-dependencies From 727418d2b2af1cc10beb9bfbf55895af77c478b6 Mon Sep 17 00:00:00 2001 From: Eric Zhang Date: Tue, 16 Sep 2025 16:34:35 -0700 Subject: [PATCH 50/83] add CHANGELOG.md (#1187) *Issue #, if available:* *Description of changes:* Add CHANGELOG.md to track future features and fixes made to ADOT. Updated pr-build.yml workflow to check that CHANGELOG.md has been updated for all changes that affect SDK behavior. Updated pre-release-prepare.yml workflow to update CHANGELOG in both release series branch, moving the Unreleased changes under a header for the new release version. Updated post-release-version-bump.yml to merge CHANGELOG back into main, resolving any conflicts. By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- .../workflows/post-release-version-bump.yml | 28 ++++++++++++- .github/workflows/pr-build.yml | 42 +++++++++++++++++++ .github/workflows/pre-release-prepare.yml | 8 ++++ CHANGELOG.md | 14 +++++++ 4 files changed, 91 insertions(+), 1 deletion(-) create mode 100644 CHANGELOG.md diff --git a/.github/workflows/post-release-version-bump.yml b/.github/workflows/post-release-version-bump.yml index 7f619bbbe9..6413b86d14 100644 --- a/.github/workflows/post-release-version-bump.yml +++ b/.github/workflows/post-release-version-bump.yml @@ -6,6 +6,10 @@ on: version: description: 'Version number (e.g., 1.0.1)' required: true + is_patch: + description: 'Is this a patch? 
(true or false)' + required: true + default: 'false' env: AWS_DEFAULT_REGION: us-east-1 @@ -100,8 +104,20 @@ jobs: sed -i'' -e "s/val adotVersion = \".*\"/val adotVersion = \"${DEV_VERSION}\"/" version.gradle.kts VERSION="${{ github.event.inputs.version }}" sed -i'' -e 's/adot-autoinstrumentation-java:v2.*"/adot-autoinstrumentation-java:v'$VERSION'"/' .github/workflows/daily-scan.yml + + # for patch releases, avoid merge conflict by manually resolving CHANGELOG with main + if [[ "${{ github.event.inputs.is_patch }}" == "true" ]]; then + # Copy the patch release entries + sed -n "/^## v${VERSION}/,/^## v[0-9]/p" CHANGELOG.md | sed '$d' > /tmp/patch_release_section.txt + git fetch origin main + git show origin/main:CHANGELOG.md > CHANGELOG.md + # Insert the patch release entries after Unreleased + awk -i inplace '/^## v[0-9]/ && !inserted { system("cat /tmp/patch_release_section.txt"); inserted=1 } {print}' CHANGELOG.md + fi + git add version.gradle.kts git add .github/workflows/daily-scan.yml + git add CHANGELOG.md git commit -m "Prepare main for next development cycle: Update version to $DEV_VERSION" git push --set-upstream origin "prepare-main-for-next-dev-cycle-${VERSION}" @@ -117,4 +133,14 @@ jobs: By submitting this pull request, I confirm that you can use, modify, copy, and redistribute this contribution, under the terms of your choice." \ --head prepare-main-for-next-dev-cycle-${VERSION} \ - --base main \ No newline at end of file + --base main + + - name: Force our CHANGELOG to override merge conflicts + run: | + git merge origin/main || true + git checkout --ours CHANGELOG.md + git add CHANGELOG.md + if ! 
git diff --quiet --cached; then + git commit -m "Force our CHANGELOG to override merge conflicts" + git push origin "prepare-main-for-next-dev-cycle-${VERSION}" + fi \ No newline at end of file diff --git a/.github/workflows/pr-build.yml b/.github/workflows/pr-build.yml index 5bd4eba669..5b343c82be 100644 --- a/.github/workflows/pr-build.yml +++ b/.github/workflows/pr-build.yml @@ -1,6 +1,12 @@ name: PR Build on: pull_request: + types: + - opened + - reopened + - synchronize + - labeled + - unlabeled branches: - main - "release/v*" @@ -8,6 +14,42 @@ env: TEST_TAG: public.ecr.aws/aws-observability/adot-autoinstrumentation-java:test-v2 jobs: + changelog-check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Check CHANGELOG + run: | + # Check if PR is from workflows bot or dependabot + if [[ "${{ github.event.pull_request.user.login }}" == "aws-application-signals-bot" ]]; then + echo "Skipping check: PR from aws-application-signals-bot" + exit 0 + fi + + if [[ "${{ github.event.pull_request.user.login }}" == "dependabot[bot]" ]]; then + echo "Skipping check: PR from dependabot" + exit 0 + fi + + # Check for skip changelog label + if echo '${{ toJSON(github.event.pull_request.labels.*.name) }}' | jq -r '.[]' | grep -q "skip changelog"; then + echo "Skipping check: skip changelog label found" + exit 0 + fi + + # Fetch base branch and check for CHANGELOG modifications + git fetch origin ${{ github.base_ref }} + if git diff --name-only origin/${{ github.base_ref }}..HEAD | grep -q "CHANGELOG.md"; then + echo "CHANGELOG.md entry found - check passed" + exit 0 + fi + + echo "It looks like you didn't add an entry to CHANGELOG.md. If this change affects the SDK behavior, please update CHANGELOG.md and link this PR in your entry. If this PR does not need a CHANGELOG entry, you can add the 'Skip Changelog' label to this PR." 
+ exit 1 + testpatch: name: Test patches applied to dependencies runs-on: aws-otel-java-instrumentation_ubuntu-latest_32-core diff --git a/.github/workflows/pre-release-prepare.yml b/.github/workflows/pre-release-prepare.yml index 9e4c634467..3459ef288d 100644 --- a/.github/workflows/pre-release-prepare.yml +++ b/.github/workflows/pre-release-prepare.yml @@ -94,6 +94,14 @@ jobs: git commit -am "Update version to ${VERSION}" git push origin "v${VERSION}_release" + - name: Update CHANGELOG for release + if: github.event.inputs.is_patch != 'true' + run: | + sed -i "s/## Unreleased/## Unreleased\n\n## v${VERSION} - $(date +%Y-%m-%d)/" CHANGELOG.md + git add CHANGELOG.md + git commit -m "Update CHANGELOG for version ${VERSION}" + git push origin "v${VERSION}_release" + - name: Create pull request against the release branch env: GITHUB_TOKEN: ${{ env.BOT_TOKEN_GITHUB_RW_PATOKEN }} diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000000..4732100a6e --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,14 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +> **Note:** This CHANGELOG was created starting after version 2.11.5. Earlier changes are not documented here. + +For any change that affects end users of this package, please add an entry under the **Unreleased** section. Briefly summarize the change and provide the link to the PR. Example: + +- add SigV4 authentication for HTTP exporter + ([#1019](https://github.com/aws-observability/aws-otel-java-instrumentation/pull/1019)) + +If your change does not need a CHANGELOG entry, add the "skip changelog" label to your PR. 
+ +## Unreleased From 2f83fa269fd358f1979e98d8d6ea831b79281559 Mon Sep 17 00:00:00 2001 From: Mahad Janjua <134644284+majanjua-amzn@users.noreply.github.com> Date: Wed, 17 Sep 2025 09:03:49 -0700 Subject: [PATCH 51/83] fix: Replace dev0 with SNAPSHOT (#1192) --- .github/workflows/post-release-version-bump.yml | 4 ++-- version.gradle.kts | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/post-release-version-bump.yml b/.github/workflows/post-release-version-bump.yml index 6413b86d14..93835b88de 100644 --- a/.github/workflows/post-release-version-bump.yml +++ b/.github/workflows/post-release-version-bump.yml @@ -100,7 +100,7 @@ jobs: - name: Update version to next development version in main run: | - DEV_VERSION="${{ github.event.inputs.version }}.dev0" + DEV_VERSION="${{ github.event.inputs.version }}-SNAPSHOT" sed -i'' -e "s/val adotVersion = \".*\"/val adotVersion = \"${DEV_VERSION}\"/" version.gradle.kts VERSION="${{ github.event.inputs.version }}" sed -i'' -e 's/adot-autoinstrumentation-java:v2.*"/adot-autoinstrumentation-java:v'$VERSION'"/' .github/workflows/daily-scan.yml @@ -125,7 +125,7 @@ jobs: env: GITHUB_TOKEN: ${{ env.BOT_TOKEN_GITHUB_RW_PATOKEN }} run: | - DEV_VERSION="${{ github.event.inputs.version }}.dev0" + DEV_VERSION="${{ github.event.inputs.version }}-SNAPSHOT" gh pr create --title "Post release $VERSION: Update version to $DEV_VERSION" \ --body "This PR prepares the main branch for the next development cycle by updating the version to $DEV_VERSION and updating the image version to be scanned to the latest released. diff --git a/version.gradle.kts b/version.gradle.kts index 586fdc18f3..9882736ff9 100644 --- a/version.gradle.kts +++ b/version.gradle.kts @@ -13,7 +13,7 @@ * permissions and limitations under the License. 
*/ -val adotVersion = "2.18.0-dev0" +val adotVersion = "2.18.0-SNAPSHOT" allprojects { version = if (project.hasProperty("release.version")) { From 069ea22c27b58e11a2daf09adf104eb6ca51ef04 Mon Sep 17 00:00:00 2001 From: Miqueas Herrera Date: Wed, 17 Sep 2025 12:32:55 -0700 Subject: [PATCH 52/83] update for 8/14 non-release workflow documents (#1193) This PR updates the 3P actions in 8/14 *non-release* workflow files. References: https://github.com/aws-actions/configure-aws-credentials https://github.com/actions/setup-node https://github.com/actions/download-artifact https://github.com/github/codeql-action https://github.com/aws-actions/aws-secretsmanager-get-secrets https://github.com/docker/login-action https://github.com/actions/upload-artifact https://github.com/docker/setup-buildx-action https://github.com/docker/setup-qemu-action https://github.com/actions/download-artifact https://github.com/docker/build-push-action https://github.com/actions/cache https://github.com/github/codeql-action https://github.com/docker/setup-buildx-action https://github.com/codecov/codecov-action https://github.com/actions/setup-go https://github.com/hashicorp/setup-terraform https://github.com/actions/setup-java By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--------- Co-authored-by: Thomas Pierce --- .../application-signals-e2e-test.yml | 4 +- .github/workflows/codeql-analysis.yml | 10 ++-- .github/workflows/daily-scan.yml | 14 +++--- .../workflows/docker-build-corretto-slim.yml | 12 ++--- .../docker-build-smoke-tests-fake-backend.yml | 10 ++-- .../e2e-tests-app-with-java-agent.yml | 36 ++++++------- .github/workflows/e2e-tests-with-operator.yml | 26 +++++----- .github/workflows/main-build.yml | 50 +++++++++---------- 8 files changed, 81 insertions(+), 81 deletions(-) diff --git a/.github/workflows/application-signals-e2e-test.yml b/.github/workflows/application-signals-e2e-test.yml index cb5abc2d66..49bddf0360 100644 --- a/.github/workflows/application-signals-e2e-test.yml +++ b/.github/workflows/application-signals-e2e-test.yml @@ -26,12 +26,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 with: role-to-assume: arn:aws:iam::${{ secrets.APPLICATION_SIGNALS_E2E_TEST_ACCOUNT_ID }}:role/${{ secrets.APPLICATION_SIGNALS_E2E_TEST_ROLE_NAME }} aws-region: us-east-1 - - uses: actions/download-artifact@v5 + - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 #5.0.0 with: name: aws-opentelemetry-agent.jar diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index ab3dd2e5cb..cdfa151ad5 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -23,11 +23,11 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0 # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL - uses: github/codeql-action/init@v3 + uses: github/codeql-action/init@16df4fbc19aea13d921737861d6c622bf3cefe23 #v3.30.3 with: languages: java @@ -37,7 +37,7 @@ jobs: distribution: temurin - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -53,9 +53,9 @@ jobs: - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Manually build to avoid autobuild failures - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #4.4.3 with: arguments: build - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 + uses: github/codeql-action/analyze@16df4fbc19aea13d921737861d6c622bf3cefe23 #v3.30.3 diff --git a/.github/workflows/daily-scan.yml b/.github/workflows/daily-scan.yml index 3b86480666..0a21d19167 100644 --- a/.github/workflows/daily-scan.yml +++ b/.github/workflows/daily-scan.yml @@ -26,7 +26,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repo for dependency scan - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0 with: fetch-depth: 0 @@ -37,13 +37,13 @@ jobs: distribution: 'temurin' - name: Configure AWS credentials for dependency scan - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 with: role-to-assume: ${{ secrets.SECRET_MANAGER_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Get NVD API key for dependency scan - uses: aws-actions/aws-secretsmanager-get-secrets@v2 + uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802 #v2.0.10 id: nvd_api_key with: secret-ids: ${{ secrets.NVD_API_KEY_SECRET_ARN }} @@ -53,7 +53,7 @@ jobs: uses: ./.github/actions/patch-dependencies - name: Build JAR - uses: 
gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #4.4.3 with: arguments: assemble -PlocalDocker=true @@ -79,13 +79,13 @@ jobs: run: less dependency-check-report.html - name: Configure AWS credentials for image scan - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Login to Public ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws @@ -127,7 +127,7 @@ jobs: - name: Configure AWS Credentials for emitting metrics if: always() - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 with: role-to-assume: ${{ secrets.METRICS_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} diff --git a/.github/workflows/docker-build-corretto-slim.yml b/.github/workflows/docker-build-corretto-slim.yml index d883c4a9de..6c61ffdd3a 100644 --- a/.github/workflows/docker-build-corretto-slim.yml +++ b/.github/workflows/docker-build-corretto-slim.yml @@ -19,24 +19,24 @@ jobs: build-corretto: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0 - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws - name: Set up 
QEMU - uses: docker/setup-qemu-action@v3 + uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 #3.6.0 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 #v3.11.1 - name: Build docker image - uses: docker/build-push-action@v6 + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 #6.18.0 with: push: true context: scripts/docker/corretto-slim diff --git a/.github/workflows/docker-build-smoke-tests-fake-backend.yml b/.github/workflows/docker-build-smoke-tests-fake-backend.yml index 7d774e4f11..db72b13e50 100644 --- a/.github/workflows/docker-build-smoke-tests-fake-backend.yml +++ b/.github/workflows/docker-build-smoke-tests-fake-backend.yml @@ -20,14 +20,14 @@ jobs: build-docker: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0 - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: 'temurin' # cache local patch outputs - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -40,16 +40,16 @@ jobs: gpg_password: ${{ secrets.GPG_PASSPHRASE }} - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws - name: Build and push docker image - uses: gradle/gradle-build-action@v3 + uses: 
gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #4.4.3 with: arguments: :smoke-tests:fakebackend:jib diff --git a/.github/workflows/e2e-tests-app-with-java-agent.yml b/.github/workflows/e2e-tests-app-with-java-agent.yml index d4f290ccd5..7b4e336eb5 100644 --- a/.github/workflows/e2e-tests-app-with-java-agent.yml +++ b/.github/workflows/e2e-tests-app-with-java-agent.yml @@ -25,11 +25,11 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Java Instrumentation repository - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 - - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: java-version-file: .java-version distribution: temurin @@ -37,7 +37,7 @@ jobs: # cache local patch outputs - name: Cache local Maven repository id: cache-local-maven-repo - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -51,27 +51,27 @@ jobs: gpg_password: ${{ secrets.GPG_PASSPHRASE }} - name: Validate the checksums of Gradle Wrapper - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 + uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws - name: Build and push agent and testing docker images with Gradle - uses: gradle/gradle-build-action@v3 + uses: 
gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #4.4.3 with: arguments: jib env: COMMIT_HASH: ${{ inputs.image_tag }} - - uses: codecov/codecov-action@v5 + - uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 #v5.5.1 test_Spring_App_With_Java_Agent: name: Test Spring App with AWS OTel Java agent @@ -79,19 +79,19 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: 'temurin' - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws @@ -110,19 +110,19 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: 'temurin' - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws @@ -141,19 +141,19 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 - - 
uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: java-version-file: .java-version distribution: 'temurin' - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws diff --git a/.github/workflows/e2e-tests-with-operator.yml b/.github/workflows/e2e-tests-with-operator.yml index 5ff4473302..71928b47e4 100644 --- a/.github/workflows/e2e-tests-with-operator.yml +++ b/.github/workflows/e2e-tests-with-operator.yml @@ -34,7 +34,7 @@ jobs: build-sample-app: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 @@ -46,7 +46,7 @@ jobs: # cache local patch outputs - name: Cache local Maven repository id: cache-local-maven-repo - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -60,18 +60,18 @@ jobs: gpg_password: ${{ secrets.GPG_PASSPHRASE }} - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws - name: Build and push Sample-Apps without Auto-Instrumentation Agent - uses: gradle/gradle-build-action@v3 + uses: 
gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 with: arguments: jibBuildWithoutAgent env: @@ -84,20 +84,20 @@ jobs: test-case-batch-value: ${{ steps.set-batches.outputs.batch-values }} steps: - name: Checkout Testing Framework repository - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: repository: ${{ env.TESTING_FRAMEWORK_REPO }} path: testing-framework ref: ${{ inputs.test_ref }} - name: Checkout Java Instrumentation repository - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 path: aws-otel-java-instrumentation - name: Set up Go 1.x - uses: actions/setup-go@v6 + uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 #v6.0.0 with: go-version: '~1.18.9' @@ -126,24 +126,24 @@ jobs: steps: # required for versioning - name: Checkout Java Instrumentation repository - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 path: aws-otel-java-instrumentation - name: Set up JDK 11 - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 + uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: distribution: 'zulu' java-version: '11' - name: Set up terraform - uses: hashicorp/setup-terraform@v3 + uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd #v3.1.2 with: terraform_version: "~1.5" - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.JAVA_INSTRUMENTATION_INTEG_TEST_ARN}} aws-region: us-west-2 @@ -151,7 +151,7 @@ jobs: role-duration-seconds: 14400 - name: Checkout Testing Framework repository - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: repository: ${{ 
env.TESTING_FRAMEWORK_REPO }} path: testing-framework diff --git a/.github/workflows/main-build.yml b/.github/workflows/main-build.yml index b8db13f7c7..ebd4f5e0e5 100644 --- a/.github/workflows/main-build.yml +++ b/.github/workflows/main-build.yml @@ -22,24 +22,24 @@ jobs: name: Test patches applied to dependencies runs-on: aws-otel-java-instrumentation_ubuntu-latest_32-core steps: - - uses: actions/checkout@v5 - - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: java-version-file: .java-version distribution: temurin # vaadin 14 tests fail with node 18 - name: Set up Node - uses: actions/setup-node@v4 + uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 #v5.0.0 with: node-version: 16 # vaadin tests use pnpm - name: Cache pnpm modules - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: ~/.pnpm-store key: ${{ runner.os }}-test-cache-pnpm-modules - - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 - uses: ./.github/actions/patch-dependencies with: run_tests: "true" @@ -54,17 +54,17 @@ jobs: staging_registry: ${{ steps.imageOutput.outputs.stagingRegistry }} staging_repository: ${{ steps.imageOutput.outputs.stagingRepository }} steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 - - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: java-version-file: .java-version distribution: temurin # cache local patch outputs - name: Cache local Maven repository - uses: actions/cache@v3 + uses: 
actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -76,21 +76,21 @@ jobs: gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} gpg_password: ${{ secrets.GPG_PASSPHRASE }} - - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws - name: Build snapshot with Gradle - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #4.4.3 with: arguments: build integrationTests snapshot --stacktrace -PenableCoverage=true -PlocalDocker=true env: @@ -128,7 +128,7 @@ jobs: snapshot-ecr-role: ${{ secrets.JAVA_INSTRUMENTATION_SNAPSHOT_ECR }} - name: Upload to GitHub Actions - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2 with: name: aws-opentelemetry-agent.jar path: otelagent/build/libs/aws-opentelemetry-agent-*.jar @@ -189,30 +189,30 @@ jobs: runs-on: ubuntu-latest needs: build steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 - - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: java-version: 23 distribution: 'temurin' - - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 + - uses: 
gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws # cache local patch outputs - name: Cache local Maven repository id: cache-local-maven-repo - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -222,17 +222,17 @@ jobs: run: docker pull public.ecr.aws/docker/library/amazoncorretto:23-alpine - name: Build snapshot with Gradle - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 with: arguments: contractTests -PlocalDocker=true application-signals-lambda-layer-build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 - - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: java-version-file: .java-version distribution: 'temurin' @@ -241,12 +241,12 @@ jobs: run: | ./build-layer.sh - name: Upload layer zip to GitHub Actions - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2 with: name: aws-opentelemetry-java-layer.zip path: lambda-layer/build/distributions/aws-opentelemetry-java-layer.zip - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: 
role-to-assume: arn:aws:iam::${{ secrets.APPLICATION_SIGNALS_E2E_TEST_ACCOUNT_ID }}:role/${{ secrets.APPLICATION_SIGNALS_E2E_TEST_ROLE_NAME }} aws-region: us-east-1 @@ -270,7 +270,7 @@ jobs: if: always() steps: - name: Configure AWS Credentials for emitting metrics - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.METRICS_ROLE_ARN }} aws-region: us-east-1 From aa4a68456d5d6b632953c9f4296ee8fe5a5bc54f Mon Sep 17 00:00:00 2001 From: Miqueas Herrera Date: Wed, 17 Sep 2025 13:42:17 -0700 Subject: [PATCH 53/83] Revert "update for 8/14 non-release workflow documents (#1193)" (#1195) This reverts commit 069ea22c27b58e11a2daf09adf104eb6ca51ef04. By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- .../application-signals-e2e-test.yml | 4 +- .github/workflows/codeql-analysis.yml | 10 ++-- .github/workflows/daily-scan.yml | 14 +++--- .../workflows/docker-build-corretto-slim.yml | 12 ++--- .../docker-build-smoke-tests-fake-backend.yml | 10 ++-- .../e2e-tests-app-with-java-agent.yml | 36 ++++++------- .github/workflows/e2e-tests-with-operator.yml | 26 +++++----- .github/workflows/main-build.yml | 50 +++++++++---------- 8 files changed, 81 insertions(+), 81 deletions(-) diff --git a/.github/workflows/application-signals-e2e-test.yml b/.github/workflows/application-signals-e2e-test.yml index 49bddf0360..cb5abc2d66 100644 --- a/.github/workflows/application-signals-e2e-test.yml +++ b/.github/workflows/application-signals-e2e-test.yml @@ -26,12 +26,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 + uses: aws-actions/configure-aws-credentials@v4 with: role-to-assume: arn:aws:iam::${{ secrets.APPLICATION_SIGNALS_E2E_TEST_ACCOUNT_ID }}:role/${{ 
secrets.APPLICATION_SIGNALS_E2E_TEST_ROLE_NAME }} aws-region: us-east-1 - - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 #5.0.0 + - uses: actions/download-artifact@v5 with: name: aws-opentelemetry-agent.jar diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index cdfa151ad5..ab3dd2e5cb 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -23,11 +23,11 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0 + uses: actions/checkout@v5 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@16df4fbc19aea13d921737861d6c622bf3cefe23 #v3.30.3 + uses: github/codeql-action/init@v3 with: languages: java @@ -37,7 +37,7 @@ jobs: distribution: temurin - name: Cache local Maven repository - uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 + uses: actions/cache@v3 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -53,9 +53,9 @@ jobs: - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Manually build to avoid autobuild failures - uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #4.4.3 + uses: gradle/gradle-build-action@v3 with: arguments: build - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@16df4fbc19aea13d921737861d6c622bf3cefe23 #v3.30.3 + uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/daily-scan.yml b/.github/workflows/daily-scan.yml index 0a21d19167..3b86480666 100644 --- a/.github/workflows/daily-scan.yml +++ b/.github/workflows/daily-scan.yml @@ -26,7 +26,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repo for dependency scan - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0 + uses: actions/checkout@v5 with: fetch-depth: 0 @@ -37,13 +37,13 @@ jobs: distribution: 'temurin' - name: 
Configure AWS credentials for dependency scan - uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 + uses: aws-actions/configure-aws-credentials@v4 with: role-to-assume: ${{ secrets.SECRET_MANAGER_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Get NVD API key for dependency scan - uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802 #v2.0.10 + uses: aws-actions/aws-secretsmanager-get-secrets@v2 id: nvd_api_key with: secret-ids: ${{ secrets.NVD_API_KEY_SECRET_ARN }} @@ -53,7 +53,7 @@ jobs: uses: ./.github/actions/patch-dependencies - name: Build JAR - uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #4.4.3 + uses: gradle/gradle-build-action@v3 with: arguments: assemble -PlocalDocker=true @@ -79,13 +79,13 @@ jobs: run: less dependency-check-report.html - name: Configure AWS credentials for image scan - uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 + uses: aws-actions/configure-aws-credentials@v4 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Login to Public ECR - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 + uses: docker/login-action@v3 with: registry: public.ecr.aws @@ -127,7 +127,7 @@ jobs: - name: Configure AWS Credentials for emitting metrics if: always() - uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 + uses: aws-actions/configure-aws-credentials@v4 with: role-to-assume: ${{ secrets.METRICS_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} diff --git a/.github/workflows/docker-build-corretto-slim.yml b/.github/workflows/docker-build-corretto-slim.yml index 6c61ffdd3a..d883c4a9de 100644 --- a/.github/workflows/docker-build-corretto-slim.yml +++ b/.github/workflows/docker-build-corretto-slim.yml @@ -19,24 +19,24 @@ jobs: build-corretto: runs-on: ubuntu-latest 
steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0 + - uses: actions/checkout@v5 - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 + uses: aws-actions/configure-aws-credentials@v4 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 + uses: docker/login-action@v3 with: registry: public.ecr.aws - name: Set up QEMU - uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 #3.6.0 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 #v3.11.1 + uses: docker/setup-buildx-action@v3 - name: Build docker image - uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 #6.18.0 + uses: docker/build-push-action@v6 with: push: true context: scripts/docker/corretto-slim diff --git a/.github/workflows/docker-build-smoke-tests-fake-backend.yml b/.github/workflows/docker-build-smoke-tests-fake-backend.yml index db72b13e50..7d774e4f11 100644 --- a/.github/workflows/docker-build-smoke-tests-fake-backend.yml +++ b/.github/workflows/docker-build-smoke-tests-fake-backend.yml @@ -20,14 +20,14 @@ jobs: build-docker: runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0 + - uses: actions/checkout@v5 - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: 'temurin' # cache local patch outputs - name: Cache local Maven repository - uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 + uses: actions/cache@v3 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -40,16 +40,16 @@ jobs: 
gpg_password: ${{ secrets.GPG_PASSPHRASE }} - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 + uses: aws-actions/configure-aws-credentials@v4 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 + uses: docker/login-action@v3 with: registry: public.ecr.aws - name: Build and push docker image - uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #4.4.3 + uses: gradle/gradle-build-action@v3 with: arguments: :smoke-tests:fakebackend:jib diff --git a/.github/workflows/e2e-tests-app-with-java-agent.yml b/.github/workflows/e2e-tests-app-with-java-agent.yml index 7b4e336eb5..d4f290ccd5 100644 --- a/.github/workflows/e2e-tests-app-with-java-agent.yml +++ b/.github/workflows/e2e-tests-app-with-java-agent.yml @@ -25,11 +25,11 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Java Instrumentation repository - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + uses: actions/checkout@v5 with: fetch-depth: 0 - - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: temurin @@ -37,7 +37,7 @@ jobs: # cache local patch outputs - name: Cache local Maven repository id: cache-local-maven-repo - uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 + uses: actions/cache@v3 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -51,27 +51,27 @@ jobs: gpg_password: ${{ secrets.GPG_PASSPHRASE }} - name: Validate the checksums of Gradle Wrapper - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 + uses: 
gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 + uses: aws-actions/configure-aws-credentials@v4 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 + uses: docker/login-action@v3 with: registry: public.ecr.aws - name: Build and push agent and testing docker images with Gradle - uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #4.4.3 + uses: gradle/gradle-build-action@v3 with: arguments: jib env: COMMIT_HASH: ${{ inputs.image_tag }} - - uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 #v5.5.1 + - uses: codecov/codecov-action@v5 test_Spring_App_With_Java_Agent: name: Test Spring App with AWS OTel Java agent @@ -79,19 +79,19 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + - uses: actions/checkout@v5 - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: 'temurin' - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 + uses: aws-actions/configure-aws-credentials@v4 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 + uses: docker/login-action@v3 with: registry: public.ecr.aws @@ -110,19 +110,19 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + - uses: actions/checkout@v5 - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version 
distribution: 'temurin' - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 + uses: aws-actions/configure-aws-credentials@v4 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 + uses: docker/login-action@v3 with: registry: public.ecr.aws @@ -141,19 +141,19 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + - uses: actions/checkout@v5 - - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: 'temurin' - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 + uses: aws-actions/configure-aws-credentials@v4 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 + uses: docker/login-action@v3 with: registry: public.ecr.aws diff --git a/.github/workflows/e2e-tests-with-operator.yml b/.github/workflows/e2e-tests-with-operator.yml index 71928b47e4..5ff4473302 100644 --- a/.github/workflows/e2e-tests-with-operator.yml +++ b/.github/workflows/e2e-tests-with-operator.yml @@ -34,7 +34,7 @@ jobs: build-sample-app: runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + - uses: actions/checkout@v5 with: fetch-depth: 0 @@ -46,7 +46,7 @@ jobs: # cache local patch outputs - name: Cache local Maven repository id: cache-local-maven-repo - uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 + uses: actions/cache@v3 with: path: | ~/.m2/repository/io/opentelemetry/ 
@@ -60,18 +60,18 @@ jobs: gpg_password: ${{ secrets.GPG_PASSPHRASE }} - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 + uses: aws-actions/configure-aws-credentials@v4 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 + uses: docker/login-action@v3 with: registry: public.ecr.aws - name: Build and push Sample-Apps without Auto-Instrumentation Agent - uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 + uses: gradle/gradle-build-action@v3 with: arguments: jibBuildWithoutAgent env: @@ -84,20 +84,20 @@ jobs: test-case-batch-value: ${{ steps.set-batches.outputs.batch-values }} steps: - name: Checkout Testing Framework repository - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + uses: actions/checkout@v5 with: repository: ${{ env.TESTING_FRAMEWORK_REPO }} path: testing-framework ref: ${{ inputs.test_ref }} - name: Checkout Java Instrumentation repository - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + uses: actions/checkout@v5 with: fetch-depth: 0 path: aws-otel-java-instrumentation - name: Set up Go 1.x - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 #v6.0.0 + uses: actions/setup-go@v6 with: go-version: '~1.18.9' @@ -126,24 +126,24 @@ jobs: steps: # required for versioning - name: Checkout Java Instrumentation repository - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + uses: actions/checkout@v5 with: fetch-depth: 0 path: aws-otel-java-instrumentation - name: Set up JDK 11 - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 + uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: distribution: 'zulu' java-version: '11' - name: Set up terraform - uses: 
hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd #v3.1.2 + uses: hashicorp/setup-terraform@v3 with: terraform_version: "~1.5" - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 + uses: aws-actions/configure-aws-credentials@v4 with: role-to-assume: ${{ secrets.JAVA_INSTRUMENTATION_INTEG_TEST_ARN}} aws-region: us-west-2 @@ -151,7 +151,7 @@ jobs: role-duration-seconds: 14400 - name: Checkout Testing Framework repository - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + uses: actions/checkout@v5 with: repository: ${{ env.TESTING_FRAMEWORK_REPO }} path: testing-framework diff --git a/.github/workflows/main-build.yml b/.github/workflows/main-build.yml index ebd4f5e0e5..b8db13f7c7 100644 --- a/.github/workflows/main-build.yml +++ b/.github/workflows/main-build.yml @@ -22,24 +22,24 @@ jobs: name: Test patches applied to dependencies runs-on: aws-otel-java-instrumentation_ubuntu-latest_32-core steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 - - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 + - uses: actions/checkout@v5 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: temurin # vaadin 14 tests fail with node 18 - name: Set up Node - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 #v5.0.0 + uses: actions/setup-node@v4 with: node-version: 16 # vaadin tests use pnpm - name: Cache pnpm modules - uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 + uses: actions/cache@v3 with: path: ~/.pnpm-store key: ${{ runner.os }}-test-cache-pnpm-modules - - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - uses: ./.github/actions/patch-dependencies with: 
run_tests: "true" @@ -54,17 +54,17 @@ jobs: staging_registry: ${{ steps.imageOutput.outputs.stagingRegistry }} staging_repository: ${{ steps.imageOutput.outputs.stagingRepository }} steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + - uses: actions/checkout@v5 with: fetch-depth: 0 - - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: temurin # cache local patch outputs - name: Cache local Maven repository - uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 + uses: actions/cache@v3 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -76,21 +76,21 @@ jobs: gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} gpg_password: ${{ secrets.GPG_PASSPHRASE }} - - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 + uses: aws-actions/configure-aws-credentials@v4 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 + uses: docker/login-action@v3 with: registry: public.ecr.aws - name: Build snapshot with Gradle - uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #4.4.3 + uses: gradle/gradle-build-action@v3 with: arguments: build integrationTests snapshot --stacktrace -PenableCoverage=true -PlocalDocker=true env: @@ -128,7 +128,7 @@ jobs: snapshot-ecr-role: ${{ secrets.JAVA_INSTRUMENTATION_SNAPSHOT_ECR }} - name: Upload to GitHub Actions - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2 + uses: actions/upload-artifact@v4 
with: name: aws-opentelemetry-agent.jar path: otelagent/build/libs/aws-opentelemetry-agent-*.jar @@ -189,30 +189,30 @@ jobs: runs-on: ubuntu-latest needs: build steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + - uses: actions/checkout@v5 with: fetch-depth: 0 - - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version: 23 distribution: 'temurin' - - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 + uses: aws-actions/configure-aws-credentials@v4 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 + uses: docker/login-action@v3 with: registry: public.ecr.aws # cache local patch outputs - name: Cache local Maven repository id: cache-local-maven-repo - uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 + uses: actions/cache@v3 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -222,17 +222,17 @@ jobs: run: docker pull public.ecr.aws/docker/library/amazoncorretto:23-alpine - name: Build snapshot with Gradle - uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 + uses: gradle/gradle-build-action@v3 with: arguments: contractTests -PlocalDocker=true application-signals-lambda-layer-build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + - uses: actions/checkout@v5 with: fetch-depth: 0 - - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 + - uses: 
actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: 'temurin' @@ -241,12 +241,12 @@ jobs: run: | ./build-layer.sh - name: Upload layer zip to GitHub Actions - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2 + uses: actions/upload-artifact@v4 with: name: aws-opentelemetry-java-layer.zip path: lambda-layer/build/distributions/aws-opentelemetry-java-layer.zip - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 + uses: aws-actions/configure-aws-credentials@v4 with: role-to-assume: arn:aws:iam::${{ secrets.APPLICATION_SIGNALS_E2E_TEST_ACCOUNT_ID }}:role/${{ secrets.APPLICATION_SIGNALS_E2E_TEST_ROLE_NAME }} aws-region: us-east-1 @@ -270,7 +270,7 @@ jobs: if: always() steps: - name: Configure AWS Credentials for emitting metrics - uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 + uses: aws-actions/configure-aws-credentials@v4 with: role-to-assume: ${{ secrets.METRICS_ROLE_ARN }} aws-region: us-east-1 From 634b267315ac6b3e02f2d096ef7f83cd565f16c2 Mon Sep 17 00:00:00 2001 From: Jonathan Lee <107072447+jj22ee@users.noreply.github.com> Date: Wed, 17 Sep 2025 14:52:20 -0700 Subject: [PATCH 54/83] Fix regions and add clarity in Lambda Layer release workflow (#1185) *Issue #, if available:* https://github.com/aws-observability/aws-otel-python-instrumentation/pull/458 *Description of changes:* 1. Remove new regions from COMMERCIAL_REGIONS 1. Rename COMMERCIAL_REGIONS to LEGACY_COMMERCIAL_REGIONS 1. Add comments for clarity By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .github/workflows/release-lambda.yml | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/.github/workflows/release-lambda.yml b/.github/workflows/release-lambda.yml index a171012d76..bc1263180f 100644 --- a/.github/workflows/release-lambda.yml +++ b/.github/workflows/release-lambda.yml @@ -12,7 +12,8 @@ on: default: 'us-east-1, us-east-2, us-west-1, us-west-2, ap-south-1, ap-northeast-3, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-northeast-1, ca-central-1, eu-central-1, eu-west-1, eu-west-2, eu-west-3, eu-north-1, sa-east-1, af-south-1, ap-east-1, ap-south-2, ap-southeast-3, ap-southeast-4, eu-central-2, eu-south-1, eu-south-2, il-central-1, me-central-1, me-south-1, ap-southeast-5, ap-southeast-7, mx-central-1, ca-west-1, cn-north-1, cn-northwest-1' env: - COMMERCIAL_REGIONS: us-east-1, us-east-2, us-west-1, us-west-2, ap-south-1, ap-northeast-3, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-northeast-1, ca-central-1, eu-central-1, eu-west-1, eu-west-2, eu-west-3, eu-north-1, sa-east-1, ap-southeast-5, ap-southeast-7, mx-central-1, ca-west-1, cn-north-1, cn-northwest-1 + # Legacy list of commercial regions to deploy to. New regions should NOT be added here, and instead should be added to the `aws_region` default input to the workflow. 
+ LEGACY_COMMERCIAL_REGIONS: us-east-1, us-east-2, us-west-1, us-west-2, ap-south-1, ap-northeast-3, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-northeast-1, ca-central-1, eu-central-1, eu-west-1, eu-west-2, eu-west-3, eu-north-1, sa-east-1 LAYER_NAME: AWSOpenTelemetryDistroJava permissions: @@ -67,21 +68,21 @@ jobs: steps: - name: role arn env: - COMMERCIAL_REGIONS: ${{ env.COMMERCIAL_REGIONS }} + LEGACY_COMMERCIAL_REGIONS: ${{ env.LEGACY_COMMERCIAL_REGIONS }} run: | - COMMERCIAL_REGIONS_ARRAY=(${COMMERCIAL_REGIONS//,/ }) + LEGACY_COMMERCIAL_REGIONS_ARRAY=(${LEGACY_COMMERCIAL_REGIONS//,/ }) FOUND=false - for REGION in "${COMMERCIAL_REGIONS_ARRAY[@]}"; do + for REGION in "${LEGACY_COMMERCIAL_REGIONS_ARRAY[@]}"; do if [[ "$REGION" == "${{ matrix.aws_region }}" ]]; then FOUND=true break fi done if [ "$FOUND" = true ]; then - echo "Found ${{ matrix.aws_region }} in COMMERCIAL_REGIONS" + echo "Found ${{ matrix.aws_region }} in LEGACY_COMMERCIAL_REGIONS" SECRET_KEY="LAMBDA_LAYER_RELEASE" else - echo "Not found ${{ matrix.aws_region }} in COMMERCIAL_REGIONS" + echo "Not found ${{ matrix.aws_region }} in LEGACY_COMMERCIAL_REGIONS" SECRET_KEY="${{ matrix.aws_region }}_LAMBDA_LAYER_RELEASE" fi SECRET_KEY=${SECRET_KEY//-/_} From 82a5538e9bb14b31c31a8ac79dbdcef803d1d2c9 Mon Sep 17 00:00:00 2001 From: Lei Wang <66336933+wangzlei@users.noreply.github.com> Date: Wed, 17 Sep 2025 16:21:54 -0700 Subject: [PATCH 55/83] Update image to 2.11.5 in daily-scan.yml (#1194) *Issue #, if available:* *Description of changes:* By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .github/workflows/daily-scan.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/daily-scan.yml b/.github/workflows/daily-scan.yml index 3b86480666..76e10b0bff 100644 --- a/.github/workflows/daily-scan.yml +++ b/.github/workflows/daily-scan.yml @@ -112,7 +112,7 @@ jobs: id: high_scan_v2 uses: ./.github/actions/image_scan with: - image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.4" + image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.5" severity: 'CRITICAL,HIGH' logout: 'false' @@ -121,7 +121,7 @@ jobs: id: low_scan_v2 uses: ./.github/actions/image_scan with: - image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.4" + image-ref: "public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v2.11.5" severity: 'MEDIUM,LOW,UNKNOWN' logout: 'false' From 8e6889d69ece3a2d0423339d2c370d06d9e34c99 Mon Sep 17 00:00:00 2001 From: Miqueas Herrera Date: Thu, 18 Sep 2025 12:32:09 -0700 Subject: [PATCH 56/83] Reverting previous revert for 3p actions update (#1198) This pr reverts the previous revert for 3p action updates for non release files. By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .../application-signals-e2e-test.yml | 4 +- .github/workflows/codeql-analysis.yml | 15 ++--- .github/workflows/daily-scan.yml | 19 +++--- .../workflows/docker-build-corretto-slim.yml | 12 ++-- .../docker-build-smoke-tests-fake-backend.yml | 15 ++--- .../e2e-tests-app-with-java-agent.yml | 41 ++++++------- .github/workflows/e2e-tests-with-operator.yml | 31 +++++----- .github/workflows/main-build.yml | 60 ++++++++++--------- 8 files changed, 102 insertions(+), 95 deletions(-) diff --git a/.github/workflows/application-signals-e2e-test.yml b/.github/workflows/application-signals-e2e-test.yml index cb5abc2d66..49bddf0360 100644 --- a/.github/workflows/application-signals-e2e-test.yml +++ b/.github/workflows/application-signals-e2e-test.yml @@ -26,12 +26,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 with: role-to-assume: arn:aws:iam::${{ secrets.APPLICATION_SIGNALS_E2E_TEST_ACCOUNT_ID }}:role/${{ secrets.APPLICATION_SIGNALS_E2E_TEST_ROLE_NAME }} aws-region: us-east-1 - - uses: actions/download-artifact@v5 + - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 #5.0.0 with: name: aws-opentelemetry-agent.jar diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index ab3dd2e5cb..add4ca8507 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -23,11 +23,11 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0 # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL - uses: github/codeql-action/init@v3 + uses: github/codeql-action/init@16df4fbc19aea13d921737861d6c622bf3cefe23 #v3.30.3 with: languages: java @@ -37,7 +37,7 @@ jobs: distribution: temurin - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -52,10 +52,11 @@ jobs: - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 + - name: Setup Gradle + uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #4.4.3 + - name: Manually build to avoid autobuild failures - uses: gradle/gradle-build-action@v3 - with: - arguments: build + run: ./gradlew build - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 + uses: github/codeql-action/analyze@16df4fbc19aea13d921737861d6c622bf3cefe23 #v3.30.3 diff --git a/.github/workflows/daily-scan.yml b/.github/workflows/daily-scan.yml index 76e10b0bff..e1d175629b 100644 --- a/.github/workflows/daily-scan.yml +++ b/.github/workflows/daily-scan.yml @@ -26,7 +26,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repo for dependency scan - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0 with: fetch-depth: 0 @@ -37,13 +37,13 @@ jobs: distribution: 'temurin' - name: Configure AWS credentials for dependency scan - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 with: role-to-assume: ${{ secrets.SECRET_MANAGER_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Get NVD API key for dependency scan - uses: aws-actions/aws-secretsmanager-get-secrets@v2 + uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802 #v2.0.10 id: nvd_api_key with: secret-ids: ${{ secrets.NVD_API_KEY_SECRET_ARN }} @@ -52,10 +52,11 @@ jobs: - name: Publish 
patched dependencies to maven local uses: ./.github/actions/patch-dependencies + - name: Setup Gradle + uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #4.4.3 + - name: Build JAR - uses: gradle/gradle-build-action@v3 - with: - arguments: assemble -PlocalDocker=true + run: ./gradlew assemble -PlocalDocker=true # See http://jeremylong.github.io/DependencyCheck/dependency-check-cli/ for installation explanation - name: Install and run dependency scan @@ -79,13 +80,13 @@ jobs: run: less dependency-check-report.html - name: Configure AWS credentials for image scan - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Login to Public ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws @@ -127,7 +128,7 @@ jobs: - name: Configure AWS Credentials for emitting metrics if: always() - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 with: role-to-assume: ${{ secrets.METRICS_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} diff --git a/.github/workflows/docker-build-corretto-slim.yml b/.github/workflows/docker-build-corretto-slim.yml index d883c4a9de..6c61ffdd3a 100644 --- a/.github/workflows/docker-build-corretto-slim.yml +++ b/.github/workflows/docker-build-corretto-slim.yml @@ -19,24 +19,24 @@ jobs: build-corretto: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0 - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: 
aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws - name: Set up QEMU - uses: docker/setup-qemu-action@v3 + uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 #3.6.0 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 #v3.11.1 - name: Build docker image - uses: docker/build-push-action@v6 + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 #6.18.0 with: push: true context: scripts/docker/corretto-slim diff --git a/.github/workflows/docker-build-smoke-tests-fake-backend.yml b/.github/workflows/docker-build-smoke-tests-fake-backend.yml index 7d774e4f11..f1b863528a 100644 --- a/.github/workflows/docker-build-smoke-tests-fake-backend.yml +++ b/.github/workflows/docker-build-smoke-tests-fake-backend.yml @@ -20,14 +20,14 @@ jobs: build-docker: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0 - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: 'temurin' # cache local patch outputs - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -40,16 +40,17 @@ jobs: gpg_password: ${{ secrets.GPG_PASSPHRASE }} - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 
#5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws + - name: Setup Gradle + uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #4.4.3 + - name: Build and push docker image - uses: gradle/gradle-build-action@v3 - with: - arguments: :smoke-tests:fakebackend:jib + run: ./gradlew :smoke-tests:fakebackend:jib diff --git a/.github/workflows/e2e-tests-app-with-java-agent.yml b/.github/workflows/e2e-tests-app-with-java-agent.yml index d4f290ccd5..8d1f8200aa 100644 --- a/.github/workflows/e2e-tests-app-with-java-agent.yml +++ b/.github/workflows/e2e-tests-app-with-java-agent.yml @@ -25,11 +25,11 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Java Instrumentation repository - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 - - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: java-version-file: .java-version distribution: temurin @@ -37,7 +37,7 @@ jobs: # cache local patch outputs - name: Cache local Maven repository id: cache-local-maven-repo - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -51,27 +51,28 @@ jobs: gpg_password: ${{ secrets.GPG_PASSPHRASE }} - name: Validate the checksums of Gradle Wrapper - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 + uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 
#5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws + - name: Setup Gradle + uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #4.4.3 + - name: Build and push agent and testing docker images with Gradle - uses: gradle/gradle-build-action@v3 - with: - arguments: jib + run: ./gradlew jib env: COMMIT_HASH: ${{ inputs.image_tag }} - - uses: codecov/codecov-action@v5 + - uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 #v5.5.1 test_Spring_App_With_Java_Agent: name: Test Spring App with AWS OTel Java agent @@ -79,19 +80,19 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: 'temurin' - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws @@ -110,19 +111,19 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: java-version-file: .java-version distribution: 'temurin' - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 
with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws @@ -141,19 +142,19 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 - - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: java-version-file: .java-version distribution: 'temurin' - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws diff --git a/.github/workflows/e2e-tests-with-operator.yml b/.github/workflows/e2e-tests-with-operator.yml index 5ff4473302..9b73ff8c57 100644 --- a/.github/workflows/e2e-tests-with-operator.yml +++ b/.github/workflows/e2e-tests-with-operator.yml @@ -34,7 +34,7 @@ jobs: build-sample-app: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 @@ -46,7 +46,7 @@ jobs: # cache local patch outputs - name: Cache local Maven repository id: cache-local-maven-repo - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -60,20 +60,21 @@ jobs: gpg_password: ${{ secrets.GPG_PASSPHRASE }} - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: 
aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws - name: Build and push Sample-Apps without Auto-Instrumentation Agent - uses: gradle/gradle-build-action@v3 - with: - arguments: jibBuildWithoutAgent + uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 + + - name: Build and push Sample-Apps with Auto-Instrumentation Agent + run: jibBuildWithoutAgent env: COMMIT_HASH: ${{ inputs.image_tag }} @@ -84,20 +85,20 @@ jobs: test-case-batch-value: ${{ steps.set-batches.outputs.batch-values }} steps: - name: Checkout Testing Framework repository - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: repository: ${{ env.TESTING_FRAMEWORK_REPO }} path: testing-framework ref: ${{ inputs.test_ref }} - name: Checkout Java Instrumentation repository - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 path: aws-otel-java-instrumentation - name: Set up Go 1.x - uses: actions/setup-go@v6 + uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 #v6.0.0 with: go-version: '~1.18.9' @@ -126,24 +127,24 @@ jobs: steps: # required for versioning - name: Checkout Java Instrumentation repository - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 path: aws-otel-java-instrumentation - name: Set up JDK 11 - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 + uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: distribution: 'zulu' java-version: '11' - name: Set up terraform - uses: hashicorp/setup-terraform@v3 + 
uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd #v3.1.2 with: terraform_version: "~1.5" - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.JAVA_INSTRUMENTATION_INTEG_TEST_ARN}} aws-region: us-west-2 @@ -151,7 +152,7 @@ jobs: role-duration-seconds: 14400 - name: Checkout Testing Framework repository - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: repository: ${{ env.TESTING_FRAMEWORK_REPO }} path: testing-framework diff --git a/.github/workflows/main-build.yml b/.github/workflows/main-build.yml index b8db13f7c7..13b18d2ab1 100644 --- a/.github/workflows/main-build.yml +++ b/.github/workflows/main-build.yml @@ -22,24 +22,24 @@ jobs: name: Test patches applied to dependencies runs-on: aws-otel-java-instrumentation_ubuntu-latest_32-core steps: - - uses: actions/checkout@v5 - - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: java-version-file: .java-version distribution: temurin # vaadin 14 tests fail with node 18 - name: Set up Node - uses: actions/setup-node@v4 + uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 #v5.0.0 with: node-version: 16 # vaadin tests use pnpm - name: Cache pnpm modules - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: ~/.pnpm-store key: ${{ runner.os }}-test-cache-pnpm-modules - - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 - uses: ./.github/actions/patch-dependencies with: run_tests: "true" @@ -54,17 +54,17 @@ 
jobs: staging_registry: ${{ steps.imageOutput.outputs.stagingRegistry }} staging_repository: ${{ steps.imageOutput.outputs.stagingRepository }} steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 - - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: java-version-file: .java-version distribution: temurin # cache local patch outputs - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -76,23 +76,24 @@ jobs: gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} gpg_password: ${{ secrets.GPG_PASSPHRASE }} - - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws + - name: Setup Gradle + uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #4.4.3 + - name: Build snapshot with Gradle - uses: gradle/gradle-build-action@v3 - with: - arguments: build integrationTests snapshot --stacktrace -PenableCoverage=true -PlocalDocker=true + run: ./gradlew build integrationTests snapshot --stacktrace -PenableCoverage=true -PlocalDocker=true env: PUBLISH_TOKEN_USERNAME: ${{ secrets.PUBLISH_TOKEN_USERNAME }} PUBLISH_TOKEN_PASSWORD: ${{ secrets.PUBLISH_TOKEN_PASSWORD }} @@ -128,7 +129,7 @@ jobs: 
snapshot-ecr-role: ${{ secrets.JAVA_INSTRUMENTATION_SNAPSHOT_ECR }} - name: Upload to GitHub Actions - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2 with: name: aws-opentelemetry-agent.jar path: otelagent/build/libs/aws-opentelemetry-agent-*.jar @@ -189,30 +190,30 @@ jobs: runs-on: ubuntu-latest needs: build steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 - - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: java-version: 23 distribution: 'temurin' - - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 + - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws # cache local patch outputs - name: Cache local Maven repository id: cache-local-maven-repo - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -221,18 +222,19 @@ jobs: - name: Pull base image of Contract Tests Sample Apps run: docker pull public.ecr.aws/docker/library/amazoncorretto:23-alpine + - name: Setup Gradle + uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 + - name: Build snapshot with Gradle - uses: gradle/gradle-build-action@v3 - with: - arguments: contractTests -PlocalDocker=true + run: contractTests 
-PlocalDocker=true application-signals-lambda-layer-build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 - - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 + - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: java-version-file: .java-version distribution: 'temurin' @@ -241,12 +243,12 @@ jobs: run: | ./build-layer.sh - name: Upload layer zip to GitHub Actions - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2 with: name: aws-opentelemetry-java-layer.zip path: lambda-layer/build/distributions/aws-opentelemetry-java-layer.zip - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: arn:aws:iam::${{ secrets.APPLICATION_SIGNALS_E2E_TEST_ACCOUNT_ID }}:role/${{ secrets.APPLICATION_SIGNALS_E2E_TEST_ROLE_NAME }} aws-region: us-east-1 @@ -270,7 +272,7 @@ jobs: if: always() steps: - name: Configure AWS Credentials for emitting metrics - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.METRICS_ROLE_ARN }} aws-region: us-east-1 From 1a322795869f3e5f658620fc2262b7cdb06f996c Mon Sep 17 00:00:00 2001 From: Miqueas Herrera Date: Fri, 19 Sep 2025 09:06:59 -0700 Subject: [PATCH 57/83] Update main-build.yml (#1200) Missed ./gradlew command. By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .github/workflows/main-build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main-build.yml b/.github/workflows/main-build.yml index 13b18d2ab1..113b3b7d4e 100644 --- a/.github/workflows/main-build.yml +++ b/.github/workflows/main-build.yml @@ -226,7 +226,7 @@ jobs: uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 - name: Build snapshot with Gradle - run: contractTests -PlocalDocker=true + run: ./gradlew contractTests -PlocalDocker=true application-signals-lambda-layer-build: runs-on: ubuntu-latest From f39896271386037f98dad4fa1ae4dcb66b1c3967 Mon Sep 17 00:00:00 2001 From: Thomas Pierce Date: Fri, 19 Sep 2025 10:11:37 -0700 Subject: [PATCH 58/83] Update Java lang version support from 22 to 23 (#1202) We depend on [OTEL Java 2.11.0](https://github.com/aws-observability/aws-otel-java-instrumentation/blob/release/v2.11.x/dependencyManagement/build.gradle.kts#L30C20-L30C26), which was released [Dec 23, 2024](https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/tag/v2.11.0), at which Java 23 was supported (released [2024-09-17 ](https://www.java.com/releases/)). We should have bumped this version then, but we didn't have a good process in place at the time. Bump version now. Skipping changelog as we will support Java 24 soon. By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .github/workflows/application-signals-e2e-test.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/application-signals-e2e-test.yml b/.github/workflows/application-signals-e2e-test.yml index 49bddf0360..1f369b8316 100644 --- a/.github/workflows/application-signals-e2e-test.yml +++ b/.github/workflows/application-signals-e2e-test.yml @@ -83,14 +83,14 @@ jobs: java-version: '21' cpu-architecture: 'x86_64' - default-v22-amd64: + default-v23-amd64: needs: [ upload-main-build ] uses: aws-observability/aws-application-signals-test-framework/.github/workflows/java-ec2-default-test.yml@main secrets: inherit with: aws-region: us-east-1 caller-workflow-name: 'main-build' - java-version: '22' + java-version: '23' cpu-architecture: 'x86_64' java-ec2-adot-sigv4-test: @@ -147,7 +147,7 @@ jobs: caller-workflow-name: 'main-build' java-version: '21' - eks-v22-amd64: + eks-v23-amd64: needs: eks-v21-amd64 uses: aws-observability/aws-application-signals-test-framework/.github/workflows/java-eks-test.yml@main secrets: inherit @@ -156,7 +156,7 @@ jobs: test-cluster-name: 'e2e-adot-test' adot-image-name: ${{ inputs.adot-image-name }} caller-workflow-name: 'main-build' - java-version: '22' + java-version: '23' # # PACKAGED DISTRIBUTION PLATFORM COVERAGE @@ -229,7 +229,7 @@ jobs: # metric-limiter-v11-amd64: - needs: [ eks-v22-amd64 ] + needs: [ eks-v23-amd64 ] uses: aws-observability/aws-application-signals-test-framework/.github/workflows/metric-limiter-test.yml@main secrets: inherit with: From e01ce042415f086998738016c8418aa9a498509c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 Sep 2025 19:04:07 +0000 Subject: [PATCH 59/83] Bump uuid from 1.18.0 to 1.18.1 in /tools/cp-utility (#1176) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [uuid](https://github.com/uuid-rs/uuid) from 1.18.0 to 1.18.1.
Release notes

Sourced from uuid's releases.

v1.18.1

What's Changed

Full Changelog: https://github.com/uuid-rs/uuid/compare/v1.18.0...v1.18.1

Commits
  • 50d8e79 Merge pull request #842 from uuid-rs/cargo/v1.18.1
  • 7948592 prepare for 1.18.1 release
  • 6d847c7 Merge pull request #841 from uuid-rs/chore/unsafe-cleanup
  • 675cccc re-gate zerocopy behind unstable feature flag
  • 4dd5828 Remove some unsafe; stabilize zerocopy
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=uuid&package-manager=cargo&previous-version=1.18.0&new-version=1.18.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) Dependabot will merge this PR once it's up-to-date and CI passes on it, as requested by @thpierce. [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Thomas Pierce --- tools/cp-utility/Cargo.lock | 4 ++-- tools/cp-utility/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tools/cp-utility/Cargo.lock b/tools/cp-utility/Cargo.lock index be6df5c070..f10bd59c49 100644 --- a/tools/cp-utility/Cargo.lock +++ b/tools/cp-utility/Cargo.lock @@ -201,9 +201,9 @@ checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" [[package]] name = "uuid" -version = "1.18.0" +version = "1.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f33196643e165781c20a5ead5582283a7dacbb87855d867fbc2df3f81eddc1be" +checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2" dependencies = [ "getrandom", "js-sys", diff --git a/tools/cp-utility/Cargo.toml b/tools/cp-utility/Cargo.toml index 8f04b613b6..3055ad8209 100644 --- a/tools/cp-utility/Cargo.toml +++ b/tools/cp-utility/Cargo.toml @@ -11,7 +11,7 @@ edition = "2021" [dev-dependencies] # dependencies only used during tests tempfile = "3.22.0" -uuid = { version = "1.18.0", features = ["v4", "fast-rng"] } +uuid = { version = "1.18.1", features = ["v4", "fast-rng"] } [profile.release] # Levers to optimize the binary for size From 4e04609b490b560c354f5567ca0008e7a6a9bdcc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 Sep 2025 20:20:02 +0000 Subject: [PATCH 60/83] Bump com.diffplug.spotless from 6.25.0 to 7.0.3 (#1070) Bumps com.diffplug.spotless from 6.25.0 to 7.0.3. 
[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=com.diffplug.spotless&package-manager=gradle&previous-version=6.25.0&new-version=7.0.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) You can trigger a rebase of this PR by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) Dependabot will merge this PR once CI passes on it, as requested by @thpierce. [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
> **Note** > Automatic rebases have been disabled on this pull request as it has been open for over 30 days. --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Thomas Pierce --- .../appsignals/test/base/JMXMetricsContractTestBase.java | 8 ++++---- .../javaagent/providers/AwsMetricAttributeGenerator.java | 8 ++++---- settings.gradle.kts | 2 +- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/appsignals-tests/contract-tests/src/test/java/software/amazon/opentelemetry/appsignals/test/base/JMXMetricsContractTestBase.java b/appsignals-tests/contract-tests/src/test/java/software/amazon/opentelemetry/appsignals/test/base/JMXMetricsContractTestBase.java index 0b0fd6e7cd..b0871c6679 100644 --- a/appsignals-tests/contract-tests/src/test/java/software/amazon/opentelemetry/appsignals/test/base/JMXMetricsContractTestBase.java +++ b/appsignals-tests/contract-tests/src/test/java/software/amazon/opentelemetry/appsignals/test/base/JMXMetricsContractTestBase.java @@ -60,10 +60,10 @@ protected void assertMetrics() { protected long getThreshold(String metricName) { long threshold = 0; switch (metricName) { - // If maximum memory size is undefined, then value is -1 - // https://docs.oracle.com/en/java/javase/17/docs/api/java.management/java/lang/management/MemoryUsage.html#getMax() - // Thread count can be negative when excutor is null - // https://github.com/apache/tomcat/blob/1afe41491f0e56ec0a776db5ff84607f87ce6640/java/org/apache/tomcat/util/net/AbstractEndpoint.java#L1204 + // If maximum memory size is undefined, then value is -1 + // https://docs.oracle.com/en/java/javase/17/docs/api/java.management/java/lang/management/MemoryUsage.html#getMax() + // Thread count can be negative when excutor is null + // https://github.com/apache/tomcat/blob/1afe41491f0e56ec0a776db5ff84607f87ce6640/java/org/apache/tomcat/util/net/AbstractEndpoint.java#L1204 case 
JMXMetricsConstants.TOMCAT_THREADS: threshold = -2; break; diff --git a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGenerator.java b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGenerator.java index 2d913e0269..62559c9b2d 100644 --- a/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGenerator.java +++ b/awsagentprovider/src/main/java/software/amazon/opentelemetry/javaagent/providers/AwsMetricAttributeGenerator.java @@ -455,8 +455,8 @@ private static String normalizeRemoteServiceName(SpanData span, String serviceNa case "AmazonSQS": // AWS SDK v1 case "Sqs": // AWS SDK v2 return NORMALIZED_SQS_SERVICE_NAME; - // For Bedrock, Bedrock Agent, and Bedrock Agent Runtime, we can align with AWS Cloud - // Control and use AWS::Bedrock for RemoteService. + // For Bedrock, Bedrock Agent, and Bedrock Agent Runtime, we can align with AWS Cloud + // Control and use AWS::Bedrock for RemoteService. case "AmazonBedrock": // AWS SDK v1 case "Bedrock": // AWS SDK v2 case "AWSBedrockAgentRuntime": // AWS SDK v1 @@ -464,8 +464,8 @@ private static String normalizeRemoteServiceName(SpanData span, String serviceNa case "AWSBedrockAgent": // AWS SDK v1 case "BedrockAgent": // AWS SDK v2 return NORMALIZED_BEDROCK_SERVICE_NAME; - // For BedrockRuntime, we are using AWS::BedrockRuntime as the associated remote resource - // (Model) is not listed in Cloud Control. + // For BedrockRuntime, we are using AWS::BedrockRuntime as the associated remote resource + // (Model) is not listed in Cloud Control. 
case "AmazonBedrockRuntime": // AWS SDK v1 case "BedrockRuntime": // AWS SDK v2 return NORMALIZED_BEDROCK_RUNTIME_SERVICE_NAME; diff --git a/settings.gradle.kts b/settings.gradle.kts index 6c44234701..74b4b8ae3d 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -15,7 +15,7 @@ pluginManagement { plugins { - id("com.diffplug.spotless") version "6.25.0" + id("com.diffplug.spotless") version "7.0.3" id("com.github.ben-manes.versions") version "0.50.0" id("com.github.jk1.dependency-license-report") version "2.5" id("com.gradleup.shadow") version "8.3.5" From d236d791795c59893eb9cb1270afcaa7494191b5 Mon Sep 17 00:00:00 2001 From: Thomas Pierce Date: Fri, 19 Sep 2025 14:08:26 -0700 Subject: [PATCH 61/83] Fix typo (#1207) Fixes annoying warning seen here: https://github.com/aws-observability/aws-otel-java-instrumentation/pull/1206/files#diff-dd2c0eb6ea5cfc6c4bd4eac30934e2d5746747af48fef6da689e85b752f39557 By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 89b45f11e9..20c27adff4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,7 +13,7 @@ # permissions and limitations under the License. # Stage 1: Build the cp-utility binary -FROM public.ecr.aws/docker/library/rust:1.89 as builder +FROM public.ecr.aws/docker/library/rust:1.89 AS builder WORKDIR /usr/src/cp-utility COPY ./tools/cp-utility . From 551937dd89958de6f2983de363a86107e0fd47c4 Mon Sep 17 00:00:00 2001 From: Jonathan Lee <107072447+jj22ee@users.noreply.github.com> Date: Fri, 19 Sep 2025 17:09:51 -0700 Subject: [PATCH 62/83] Support Trace Context extraction from Lambda Context object, and respect user-configured OTEL_PROPAGATORS (#1191) *Issue #, if available:* - In latest `com.amazonaws:aws-lambda-java-core:1.4.0`, Lambda Context has a new `lambdaContext.getXrayTraceId()` method. 
We need to use this over SystemProperty/EnvVar to support multi-concurrency in Lambda. *Description of changes:* - respect OTEL_PROPAGATORS Env Var priority - Priority logic BEFORE: - Create `carrierA` to contain headers from Lambda request http headers and the custom client context - get X-Ray Trace ID from (in order or priority) SystemProperty or EnvVar, add it to new `carrierB` - Use Global Propagator to extract trace context from `carrierB` (trace extraction only works if xray propagator is configured) - If above trace extraction results in a context that is not valid&sampled, try again with Global Propagator using `carrierA` - Priority logic AFTER: - Create `carrierA` to contain headers from Lambda request http headers and the custom client context - get X-Ray Trace ID from (in order or priority) Lambda Context, SystemProperty, or EnvVar, add it to `carrierA` (will overwrite x-ray header value if present from Lambda request http headers). - Use Global Propagator to extract trace context from `carrierA` By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- CHANGELOG.md | 5 + lambda-layer/otel-instrument | 2 +- .../opentelemetry-java-instrumentation.patch | 412 +++++++++++++++--- 3 files changed, 366 insertions(+), 53 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4732100a6e..dcfaa0543d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,3 +12,8 @@ For any change that affects end users of this package, please add an entry under If your change does not need a CHANGELOG entry, add the "skip changelog" label to your PR. 
## Unreleased + +### Enhancements + +- Support X-Ray Trace Id extraction from Lambda Context object, and respect user-configured OTEL_PROPAGATORS in AWS Lamdba instrumentation + ([#1191](https://github.com/aws-observability/aws-otel-java-instrumentation/pull/1191)) \ No newline at end of file diff --git a/lambda-layer/otel-instrument b/lambda-layer/otel-instrument index 7fd2366d1e..a718c8ab75 100644 --- a/lambda-layer/otel-instrument +++ b/lambda-layer/otel-instrument @@ -2,7 +2,7 @@ export OTEL_INSTRUMENTATION_AWS_SDK_EXPERIMENTAL_SPAN_ATTRIBUTES=true -export OTEL_PROPAGATORS="${OTEL_PROPAGATORS:-baggage,xray,tracecontext}" +export OTEL_PROPAGATORS="${OTEL_PROPAGATORS:-baggage,tracecontext,xray}" export OTEL_SERVICE_NAME=${OTEL_SERVICE_NAME:-${AWS_LAMBDA_FUNCTION_NAME}} diff --git a/lambda-layer/patches/opentelemetry-java-instrumentation.patch b/lambda-layer/patches/opentelemetry-java-instrumentation.patch index 9f4baa1481..c90c3bb0fa 100644 --- a/lambda-layer/patches/opentelemetry-java-instrumentation.patch +++ b/lambda-layer/patches/opentelemetry-java-instrumentation.patch @@ -1,5 +1,78 @@ +diff --git a/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestHandlerInstrumentation.java b/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestHandlerInstrumentation.java +index 93071e04d2..add9f64276 100644 +--- a/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestHandlerInstrumentation.java ++++ b/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestHandlerInstrumentation.java +@@ -68,7 +68,7 @@ public class AwsLambdaRequestHandlerInstrumentation implements TypeInstrumentati + 
@Advice.Local("otelContext") io.opentelemetry.context.Context otelContext, + @Advice.Local("otelScope") Scope otelScope) { + input = AwsLambdaRequest.create(context, arg, Collections.emptyMap()); +- io.opentelemetry.context.Context parentContext = functionInstrumenter().extract(input); ++ io.opentelemetry.context.Context parentContext = functionInstrumenter().extract(input, context); + + if (!functionInstrumenter().shouldStart(parentContext, input)) { + return; +diff --git a/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestStreamHandlerInstrumentation.java b/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestStreamHandlerInstrumentation.java +index a6b89d253d..e62d30eddb 100644 +--- a/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestStreamHandlerInstrumentation.java ++++ b/instrumentation/aws-lambda/aws-lambda-core-1.0/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdacore/v1_0/AwsLambdaRequestStreamHandlerInstrumentation.java +@@ -69,7 +69,7 @@ public class AwsLambdaRequestStreamHandlerInstrumentation implements TypeInstrum + @Advice.Local("otelScope") Scope otelScope) { + + otelInput = AwsLambdaRequest.create(context, input, Collections.emptyMap()); +- io.opentelemetry.context.Context parentContext = functionInstrumenter().extract(otelInput); ++ io.opentelemetry.context.Context parentContext = functionInstrumenter().extract(otelInput, context); + + if (!functionInstrumenter().shouldStart(parentContext, otelInput)) { + return; +diff --git a/instrumentation/aws-lambda/aws-lambda-core-1.0/library/build.gradle.kts b/instrumentation/aws-lambda/aws-lambda-core-1.0/library/build.gradle.kts +index df605add2f..b2f01d9d4d 100644 +--- 
a/instrumentation/aws-lambda/aws-lambda-core-1.0/library/build.gradle.kts ++++ b/instrumentation/aws-lambda/aws-lambda-core-1.0/library/build.gradle.kts +@@ -9,7 +9,7 @@ dependencies { + compileOnly("com.google.auto.value:auto-value-annotations") + annotationProcessor("com.google.auto.value:auto-value") + +- library("com.amazonaws:aws-lambda-java-core:1.0.0") ++ library("com.amazonaws:aws-lambda-java-core:1.4.0") + + // We do lightweight parsing of JSON to extract HTTP headers from requests for propagation. + // This will be commonly needed even for users that don't use events, but luckily it's not too big. +@@ -26,6 +26,7 @@ dependencies { + + testImplementation(project(":instrumentation:aws-lambda:aws-lambda-core-1.0:testing")) + testImplementation("uk.org.webcompere:system-stubs-jupiter") ++ testImplementation("com.google.guava:guava") + } + + tasks.withType().configureEach { +diff --git a/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/main/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/TracingRequestHandler.java b/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/main/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/TracingRequestHandler.java +index 873040f66e..b38648e8cf 100644 +--- a/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/main/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/TracingRequestHandler.java ++++ b/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/main/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/TracingRequestHandler.java +@@ -66,7 +66,7 @@ public abstract class TracingRequestHandler implements RequestHandler headers = input.getHeaders(); if (input.getAwsContext() != null && input.getAwsContext().getClientContext() != null) { -@@ -59,23 +57,15 @@ public class AwsLambdaFunctionInstrumenter { +@@ -59,23 +58,15 @@ public class AwsLambdaFunctionInstrumenter { } } - + - return openTelemetry - .getPropagators() - .getTextMapPropagator() - .extract(Context.root(), 
headers, MapGetter.INSTANCE); -+ return ParentContextExtractor.extract(headers, this); ++ return ParentContextExtractor.extract(headers, this, lambdaContext); } - + - private enum MapGetter implements TextMapGetter> { - INSTANCE; - @@ -58,7 +134,7 @@ index 9341bf6f79..2208c3c482 100644 - } + public Context extract(Map headers, TextMapGetter> getter) { + ContextPropagationDebug.debugContextLeakIfEnabled(); - + - @Override - public String get(Map map, String s) { - return map.get(s.toLowerCase(Locale.ROOT)); @@ -71,10 +147,10 @@ index 9341bf6f79..2208c3c482 100644 } diff --git a/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/main/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/internal/ParentContextExtractor.java b/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/main/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/internal/ParentContextExtractor.java new file mode 100644 -index 0000000000..439ed0de07 +index 0000000000..e711558e05 --- /dev/null +++ b/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/main/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/internal/ParentContextExtractor.java -@@ -0,0 +1,77 @@ +@@ -0,0 +1,68 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 @@ -84,11 +160,8 @@ index 0000000000..439ed0de07 + +import static io.opentelemetry.instrumentation.awslambdacore.v1_0.internal.MapUtils.lowercaseMap; + -+import io.opentelemetry.api.trace.Span; -+import io.opentelemetry.api.trace.SpanContext; +import io.opentelemetry.context.Context; +import io.opentelemetry.context.propagation.TextMapGetter; -+import java.util.Collections; +import java.util.Locale; +import java.util.Map; + @@ -103,39 +176,33 @@ index 0000000000..439ed0de07 + // lower-case map getter used for extraction + static final String AWS_TRACE_HEADER_PROPAGATOR_KEY = "x-amzn-trace-id"; + -+ static Context extract(Map headers, AwsLambdaFunctionInstrumenter instrumenter) { -+ Context parentContext = 
null; -+ String parentTraceHeader = getTraceHeader(); ++ static Context extract( ++ Map headers, ++ AwsLambdaFunctionInstrumenter instrumenter, ++ com.amazonaws.services.lambda.runtime.Context lambdaContext) { ++ Map mergedHeaders = lowercaseMap(headers); ++ String parentTraceHeader = getTraceHeader(lambdaContext); + if (parentTraceHeader != null) { -+ parentContext = instrumenter.extract( -+ Collections.singletonMap(AWS_TRACE_HEADER_PROPAGATOR_KEY, parentTraceHeader), -+ MapGetter.INSTANCE); -+ } -+ if (!isValidAndSampled(parentContext)) { -+ // try http -+ parentContext = instrumenter.extract(lowercaseMap(headers), MapGetter.INSTANCE); ++ mergedHeaders.put(AWS_TRACE_HEADER_PROPAGATOR_KEY, parentTraceHeader); + } -+ return parentContext; ++ return instrumenter.extract(mergedHeaders, MapGetter.INSTANCE); + } + -+ private static String getTraceHeader() { ++ private static String getTraceHeader( ++ com.amazonaws.services.lambda.runtime.Context lambdaContext) { ++ String traceHeader = lambdaContext.getXrayTraceId(); ++ if (traceHeader != null && !traceHeader.isEmpty()) { ++ return traceHeader; ++ } ++ + // Lambda propagates trace header by system property instead of environment variable from java17 -+ String traceHeader = System.getProperty(AWS_TRACE_HEADER_PROP); ++ traceHeader = System.getProperty(AWS_TRACE_HEADER_PROP); + if (traceHeader == null || traceHeader.isEmpty()) { + return System.getenv(AWS_TRACE_HEADER_ENV_KEY); + } + return traceHeader; + } + -+ private static boolean isValidAndSampled(Context context) { -+ if (context == null) { -+ return false; -+ } -+ Span parentSpan = Span.fromContext(context); -+ SpanContext parentSpanContext = parentSpan.getSpanContext(); -+ return (parentSpanContext.isValid() && parentSpanContext.isSampled()); -+ } -+ + private enum MapGetter implements TextMapGetter> { + INSTANCE; + @@ -152,12 +219,25 @@ index 0000000000..439ed0de07 + + private ParentContextExtractor() {} +} +diff --git 
a/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/test/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/internal/InstrumenterExtractionTest.java b/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/test/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/internal/InstrumenterExtractionTest.java +index cb19d1e568..12ed174bb2 100644 +--- a/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/test/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/internal/InstrumenterExtractionTest.java ++++ b/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/test/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/internal/InstrumenterExtractionTest.java +@@ -37,7 +37,7 @@ class InstrumenterExtractionTest { + + AwsLambdaRequest input = AwsLambdaRequest.create(awsContext, new HashMap<>(), new HashMap<>()); + +- Context extracted = instr.extract(input); ++ Context extracted = instr.extract(input, awsContext); + SpanContext spanContext = Span.fromContext(extracted).getSpanContext(); + assertThat(spanContext.getTraceId()).isEqualTo("4bf92f3577b34da6a3ce929d0e0e4736"); + assertThat(spanContext.getSpanId()).isEqualTo("00f067aa0ba902b7"); diff --git a/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/test/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/internal/ParentContextExtractorTest.java b/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/test/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/internal/ParentContextExtractorTest.java new file mode 100644 -index 0000000000..1fa0b6e536 +index 0000000000..76fc823a65 --- /dev/null +++ b/instrumentation/aws-lambda/aws-lambda-core-1.0/library/src/test/java/io/opentelemetry/instrumentation/awslambdacore/v1_0/internal/ParentContextExtractorTest.java -@@ -0,0 +1,135 @@ +@@ -0,0 +1,337 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 @@ -166,13 +246,17 @@ index 0000000000..1fa0b6e536 +package 
io.opentelemetry.instrumentation.awslambdacore.v1_0.internal; + +import static org.assertj.core.api.Assertions.assertThat; ++import static org.mockito.Mockito.mock; ++import static org.mockito.Mockito.when; + ++import com.amazonaws.services.lambda.runtime.Context; +import com.google.common.collect.ImmutableMap; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanContext; -+import io.opentelemetry.context.Context; +import io.opentelemetry.context.propagation.ContextPropagators; ++import io.opentelemetry.context.propagation.TextMapPropagator; ++import io.opentelemetry.contrib.awsxray.propagator.AwsXrayPropagator; +import io.opentelemetry.extension.trace.propagation.B3Propagator; +import java.util.Map; +import org.junit.jupiter.api.Test; @@ -190,12 +274,33 @@ index 0000000000..1fa0b6e536 +class ParentContextExtractorTest { + + @SystemStub final EnvironmentVariables environmentVariables = new EnvironmentVariables(); ++ @SystemStub final SystemProperties systemProperties = new SystemProperties(); + -+ private static final OpenTelemetry OTEL = ++ private static final OpenTelemetry OTEL_WITH_B3_PROPAGATOR = + OpenTelemetry.propagating(ContextPropagators.create(B3Propagator.injectingSingleHeader())); + -+ private static final AwsLambdaFunctionInstrumenter INSTRUMENTER = -+ AwsLambdaFunctionInstrumenterFactory.createInstrumenter(OTEL); ++ private static final AwsLambdaFunctionInstrumenter INSTRUMENTER_WITH_B3_PROPAGATOR = ++ AwsLambdaFunctionInstrumenterFactory.createInstrumenter(OTEL_WITH_B3_PROPAGATOR); ++ ++ // Only for new lambda context tests ++ private static final OpenTelemetry OTEL_WITH_B3_XRAY_PROPAGATORS = ++ OpenTelemetry.propagating( ++ ContextPropagators.create( ++ TextMapPropagator.composite( ++ B3Propagator.injectingSingleHeader(), AwsXrayPropagator.getInstance()))); ++ private static final OpenTelemetry OTEL_WITH_XRAY_B3_PROPAGATORS = ++ OpenTelemetry.propagating( ++ 
ContextPropagators.create( ++ TextMapPropagator.composite( ++ AwsXrayPropagator.getInstance(), B3Propagator.injectingSingleHeader()))); ++ ++ private static final AwsLambdaFunctionInstrumenter INSTRUMENTER_WITH_B3_XRAY_PROPAGATORS = ++ AwsLambdaFunctionInstrumenterFactory.createInstrumenter(OTEL_WITH_B3_XRAY_PROPAGATORS); ++ ++ private static final AwsLambdaFunctionInstrumenter INSTRUMENTER_WITH_XRAY_B3_PROPAGATORS = ++ AwsLambdaFunctionInstrumenterFactory.createInstrumenter(OTEL_WITH_XRAY_B3_PROPAGATORS); ++ ++ private static final Context mockLambdaContext = mock(Context.class); + + @Test + void shouldUseHttpIfAwsParentNotSampled() { @@ -213,7 +318,8 @@ index 0000000000..1fa0b6e536 + "Root=1-8a3c60f7-d188f8fa79d48a391a778fa6;Parent=0000000000000456;Sampled=0"); + + // when -+ Context context = ParentContextExtractor.extract(headers, INSTRUMENTER); ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract(headers, INSTRUMENTER_WITH_B3_PROPAGATOR, mockLambdaContext); + // then + Span span = Span.fromContext(context); + SpanContext spanContext = span.getSpanContext(); @@ -239,7 +345,9 @@ index 0000000000..1fa0b6e536 + "Root=1-8a3c60f7-d188f8fa79d48a391a778fa6;Parent=0000000000000456;Sampled=1"); + + // when -+ Context context = ParentContextExtractor.extract(headers, INSTRUMENTER); ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract( ++ headers, INSTRUMENTER_WITH_B3_XRAY_PROPAGATORS, mockLambdaContext); + // then + Span span = Span.fromContext(context); + SpanContext spanContext = span.getSpanContext(); @@ -262,7 +370,8 @@ index 0000000000..1fa0b6e536 + "true"); + + // when -+ Context context = ParentContextExtractor.extract(headers, INSTRUMENTER); ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract(headers, INSTRUMENTER_WITH_B3_PROPAGATOR, mockLambdaContext); + // then + Span span = Span.fromContext(context); + SpanContext spanContext = span.getSpanContext(); @@ -277,22 +386,221 @@ index 
0000000000..1fa0b6e536 + // given + systemProperties.set( + "com.amazonaws.xray.traceHeader", -+ "Root=1-8a3c60f7-d188f8fa79d48a391a778fa7;Parent=0000000000000789;Sampled=0"); ++ "Root=1-8a3c60f7-d188f8fa79d48a391a778fa7;Parent=0000000000000789;Sampled=1"); + environmentVariables.set( + "_X_AMZN_TRACE_ID", + "Root=1-8a3c60f7-d188f8fa79d48a391a778fa6;Parent=0000000000000456;Sampled=1"); + + // when -+ Context context = ParentContextExtractor.extract(headers, INSTRUMENTER); ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract( ++ ImmutableMap.of(), INSTRUMENTER_WITH_B3_XRAY_PROPAGATORS, mockLambdaContext); + // then + Span span = Span.fromContext(context); + SpanContext spanContext = span.getSpanContext(); + assertThat(spanContext.isValid()).isTrue(); + assertThat(spanContext.isValid()).isTrue(); + assertThat(spanContext.getSpanId()).isEqualTo("0000000000000789"); -+ assertThat(spanContext.getTraceId()).isEqualTo("d188f8fa79d48a391a778fa7"); ++ assertThat(spanContext.getTraceId()).isEqualTo("8a3c60f7d188f8fa79d48a391a778fa7"); ++ } ++ ++ @Test ++ void shouldUseLambdaContextToExtractXrayTraceId() { ++ // given ++ Map headers = ImmutableMap.of(); ++ Context mockLambdaContextWithXrayTraceId = mock(Context.class); ++ when(mockLambdaContextWithXrayTraceId.getXrayTraceId()) ++ .thenReturn("Root=1-4fd0b613-1f19f39af59518d127b0cafe;Parent=0000000000000123;Sampled=1"); ++ ++ // when ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract( ++ headers, INSTRUMENTER_WITH_B3_XRAY_PROPAGATORS, mockLambdaContextWithXrayTraceId); ++ // then ++ Span span = Span.fromContext(context); ++ SpanContext spanContext = span.getSpanContext(); ++ assertThat(spanContext.isValid()).isTrue(); ++ assertThat(spanContext.getSpanId()).isEqualTo("0000000000000123"); ++ assertThat(spanContext.getTraceId()).isEqualTo("4fd0b6131f19f39af59518d127b0cafe"); ++ } ++ ++ @Test ++ void shouldPreferLambdaContextOverSystemProperty() { ++ // given ++ Map headers = 
ImmutableMap.of(); ++ Context mockLambdaContextWithXrayTraceId = mock(Context.class); ++ when(mockLambdaContextWithXrayTraceId.getXrayTraceId()) ++ .thenReturn("Root=1-4fd0b613-1f19f39af59518d127b0cafe;Parent=0000000000000123;Sampled=1"); ++ systemProperties.set( ++ "com.amazonaws.xray.traceHeader", ++ "Root=1-8a3c60f7-d188f8fa79d48a391a778fa7;Parent=0000000000000789;Sampled=1"); ++ ++ // when ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract( ++ headers, INSTRUMENTER_WITH_B3_XRAY_PROPAGATORS, mockLambdaContextWithXrayTraceId); ++ // then ++ Span span = Span.fromContext(context); ++ SpanContext spanContext = span.getSpanContext(); ++ assertThat(spanContext.isValid()).isTrue(); ++ assertThat(spanContext.getSpanId()).isEqualTo("0000000000000123"); ++ assertThat(spanContext.getTraceId()).isEqualTo("4fd0b6131f19f39af59518d127b0cafe"); ++ } ++ ++ @Test ++ void shouldPreferLambdaContextOverEnvVariable() { ++ // given ++ Map headers = ImmutableMap.of(); ++ Context mockLambdaContextWithXrayTraceId = mock(Context.class); ++ when(mockLambdaContextWithXrayTraceId.getXrayTraceId()) ++ .thenReturn("Root=1-4fd0b613-1f19f39af59518d127b0cafe;Parent=0000000000000123;Sampled=1"); ++ environmentVariables.set( ++ "_X_AMZN_TRACE_ID", ++ "Root=1-8a3c60f7-d188f8fa79d48a391a778fa6;Parent=0000000000000456;Sampled=1"); ++ ++ // when ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract( ++ headers, INSTRUMENTER_WITH_B3_XRAY_PROPAGATORS, mockLambdaContextWithXrayTraceId); ++ // then ++ Span span = Span.fromContext(context); ++ SpanContext spanContext = span.getSpanContext(); ++ assertThat(spanContext.isValid()).isTrue(); ++ assertThat(spanContext.getSpanId()).isEqualTo("0000000000000123"); ++ assertThat(spanContext.getTraceId()).isEqualTo("4fd0b6131f19f39af59518d127b0cafe"); ++ } ++ ++ @Test ++ void shouldPreferLambdaContextOverHttp() { ++ // given ++ Map headers = ++ ImmutableMap.of( ++ "X-b3-traceId", ++ 
"4fd0b6131f19f39af59518d127b0cafe", ++ "x-b3-spanid", ++ "0000000000000123", ++ "X-B3-Sampled", ++ "true"); ++ Context mockLambdaContextWithXrayTraceId = mock(Context.class); ++ when(mockLambdaContextWithXrayTraceId.getXrayTraceId()) ++ .thenReturn("Root=1-8a3c60f7-d188f8fa79d48a391a778fa6;Parent=0000000000000456;Sampled=1"); ++ ++ // when ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract( ++ headers, INSTRUMENTER_WITH_B3_XRAY_PROPAGATORS, mockLambdaContextWithXrayTraceId); ++ // then ++ Span span = Span.fromContext(context); ++ SpanContext spanContext = span.getSpanContext(); ++ assertThat(spanContext.isValid()).isTrue(); ++ assertThat(spanContext.getSpanId()).isEqualTo("0000000000000456"); ++ assertThat(spanContext.getTraceId()).isEqualTo("8a3c60f7d188f8fa79d48a391a778fa6"); ++ } ++ ++ @Test ++ void shouldPreferHttpOverXrayIdSetByLambdaContext() { ++ // given ++ Map headers = ++ ImmutableMap.of( ++ "X-b3-traceId", ++ "4fd0b6131f19f39af59518d127b0cafe", ++ "x-b3-spanid", ++ "0000000000000123", ++ "X-B3-Sampled", ++ "true"); ++ environmentVariables.set( ++ "_X_AMZN_TRACE_ID", ++ "Root=1-8a3c60f7-d188f8fa79d48a391a778fa6;Parent=0000000000000456;Sampled=1"); ++ systemProperties.set( ++ "com.amazonaws.xray.traceHeader", ++ "Root=1-8a3c60f7-d188f8fa79d48a391a778fa6;Parent=0000000000000456;Sampled=1"); ++ Context mockLambdaContextWithXrayTraceId = mock(Context.class); ++ when(mockLambdaContextWithXrayTraceId.getXrayTraceId()) ++ .thenReturn("Root=1-8a3c60f7-d188f8fa79d48a391a778fa6;Parent=0000000000000456;Sampled=1"); ++ ++ // when ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract( ++ headers, INSTRUMENTER_WITH_XRAY_B3_PROPAGATORS, mockLambdaContextWithXrayTraceId); ++ // then ++ Span span = Span.fromContext(context); ++ SpanContext spanContext = span.getSpanContext(); ++ assertThat(spanContext.isValid()).isTrue(); ++ assertThat(spanContext.isValid()).isTrue(); ++ 
assertThat(spanContext.getSpanId()).isEqualTo("0000000000000123"); ++ assertThat(spanContext.getTraceId()).isEqualTo("4fd0b6131f19f39af59518d127b0cafe"); ++ } ++ ++ @Test ++ void shouldFallbackToSystemPropertyIfContextTraceIdIsNull() { ++ // given ++ Map headers = ImmutableMap.of(); ++ Context mockLambdaContextWithXrayTraceId = mock(Context.class); ++ when(mockLambdaContextWithXrayTraceId.getXrayTraceId()).thenReturn(null); ++ systemProperties.set( ++ "com.amazonaws.xray.traceHeader", ++ "Root=1-8a3c60f7-d188f8fa79d48a391a778fa7;Parent=0000000000000789;Sampled=1"); ++ ++ // when ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract( ++ headers, INSTRUMENTER_WITH_B3_XRAY_PROPAGATORS, mockLambdaContextWithXrayTraceId); ++ // then ++ Span span = Span.fromContext(context); ++ SpanContext spanContext = span.getSpanContext(); ++ assertThat(spanContext.isValid()).isTrue(); ++ assertThat(spanContext.getSpanId()).isEqualTo("0000000000000789"); ++ assertThat(spanContext.getTraceId()).isEqualTo("8a3c60f7d188f8fa79d48a391a778fa7"); ++ } ++ ++ @Test ++ void shouldFallbackToSystemPropertyIfContextTraceIdIsEmptyString() { ++ // given ++ Map headers = ImmutableMap.of(); ++ Context mockLambdaContextWithXrayTraceId = mock(Context.class); ++ when(mockLambdaContextWithXrayTraceId.getXrayTraceId()).thenReturn(""); ++ systemProperties.set( ++ "com.amazonaws.xray.traceHeader", ++ "Root=1-8a3c60f7-d188f8fa79d48a391a778fa7;Parent=0000000000000789;Sampled=1"); ++ ++ // when ++ io.opentelemetry.context.Context context = ++ ParentContextExtractor.extract( ++ headers, INSTRUMENTER_WITH_B3_XRAY_PROPAGATORS, mockLambdaContextWithXrayTraceId); ++ // then ++ Span span = Span.fromContext(context); ++ SpanContext spanContext = span.getSpanContext(); ++ assertThat(spanContext.isValid()).isTrue(); ++ assertThat(spanContext.getSpanId()).isEqualTo("0000000000000789"); ++ assertThat(spanContext.getTraceId()).isEqualTo("8a3c60f7d188f8fa79d48a391a778fa7"); + } +} +diff --git 
a/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestHandlerInstrumentation.java b/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestHandlerInstrumentation.java +index e059250807..1fa80c3735 100644 +--- a/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestHandlerInstrumentation.java ++++ b/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestHandlerInstrumentation.java +@@ -70,7 +70,7 @@ public class AwsLambdaRequestHandlerInstrumentation implements TypeInstrumentati + } + input = AwsLambdaRequest.create(context, arg, headers); + io.opentelemetry.context.Context parentContext = +- AwsLambdaInstrumentationHelper.functionInstrumenter().extract(input); ++ AwsLambdaInstrumentationHelper.functionInstrumenter().extract(input, context); + + if (!AwsLambdaInstrumentationHelper.functionInstrumenter() + .shouldStart(parentContext, input)) { +diff --git a/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestStreamHandlerInstrumentation.java b/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestStreamHandlerInstrumentation.java +index fb5971016a..d31389e1c4 100644 +--- a/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestStreamHandlerInstrumentation.java ++++ 
b/instrumentation/aws-lambda/aws-lambda-events-2.2/javaagent/src/main/java/io/opentelemetry/javaagent/instrumentation/awslambdaevents/v2_2/AwsLambdaRequestStreamHandlerInstrumentation.java +@@ -62,7 +62,7 @@ public class AwsLambdaRequestStreamHandlerInstrumentation implements TypeInstrum + Map headers = Collections.emptyMap(); + otelInput = AwsLambdaRequest.create(context, input, headers); + io.opentelemetry.context.Context parentContext = +- AwsLambdaInstrumentationHelper.functionInstrumenter().extract(otelInput); ++ AwsLambdaInstrumentationHelper.functionInstrumenter().extract(otelInput, context); + + if (!AwsLambdaInstrumentationHelper.functionInstrumenter() + .shouldStart(parentContext, otelInput)) { diff --git a/instrumentation/aws-lambda/aws-lambda-events-2.2/library/src/main/java/io/opentelemetry/instrumentation/awslambdaevents/v2_2/internal/AwsLambdaSqsInstrumenterFactory.java b/instrumentation/aws-lambda/aws-lambda-events-2.2/library/src/main/java/io/opentelemetry/instrumentation/awslambdaevents/v2_2/internal/AwsLambdaSqsInstrumenterFactory.java index 4cd11fc0c4..7b7d62755c 100644 --- a/instrumentation/aws-lambda/aws-lambda-events-2.2/library/src/main/java/io/opentelemetry/instrumentation/awslambdaevents/v2_2/internal/AwsLambdaSqsInstrumenterFactory.java @@ -304,7 +612,7 @@ index 4cd11fc0c4..7b7d62755c 100644 - .addSpanLinksExtractor(new SqsMessageSpanLinksExtractor()) .buildInstrumenter(SpanKindExtractor.alwaysConsumer()); } - + diff --git a/version.gradle.kts b/version.gradle.kts index 023d04703c..b267166804 100644 --- a/version.gradle.kts From aabd5b20ed06b6bea22684f4b7245b5549dc4ddd Mon Sep 17 00:00:00 2001 From: "Luke (GuangHui) Zhang" Date: Fri, 19 Sep 2025 18:50:51 -0700 Subject: [PATCH 63/83] Fix: Incorrect zip file name in Lambda Java release workflow (#1208) (#1210) Problem: The Lambda Java layer release fails because the workflow uses the wrong artifact (layer zip file) name. 
https://github.com/aws-observability/aws-otel-java-instrumentation/actions/runs/17867947751 Solution: Rename aws-opentelemetry-java-layer.zip to layer.zip. By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. --- .github/workflows/release-lambda.yml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release-lambda.yml b/.github/workflows/release-lambda.yml index bc1263180f..6394e1dfd1 100644 --- a/.github/workflows/release-lambda.yml +++ b/.github/workflows/release-lambda.yml @@ -206,10 +206,13 @@ jobs: done echo "}" >> ../layer_cdk cat ../layer_cdk - - name: download layer.zip + - name: download aws-opentelemetry-java-layer.zip uses: actions/download-artifact@v5 with: - name: layer.zip + name: aws-opentelemetry-java-layer.zip + - name: rename to layer.zip + run: | + mv aws-opentelemetry-java-layer.zip layer.zip - name: Get commit hash id: commit run: echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT From 74f710561d945a673a3db3333e359ccd7992d454 Mon Sep 17 00:00:00 2001 From: Mahad Janjua <134644284+majanjua-amzn@users.noreply.github.com> Date: Mon, 22 Sep 2025 10:25:04 -0700 Subject: [PATCH 64/83] Add adaptive sampling e2e test to build (#1211) --- .github/workflows/application-signals-e2e-test.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/application-signals-e2e-test.yml b/.github/workflows/application-signals-e2e-test.yml index 1f369b8316..ca152b37b0 100644 --- a/.github/workflows/application-signals-e2e-test.yml +++ b/.github/workflows/application-signals-e2e-test.yml @@ -99,6 +99,13 @@ jobs: secrets: inherit with: caller-workflow-name: 'main-build' + + java-ec2-adaptive-sampling-test: + needs: [ upload-main-build ] + uses: aws-observability/aws-application-signals-test-framework/.github/workflows/java-ec2-adaptive-sampling-test.yml@main + secrets: inherit + with: + caller-workflow-name: 'main-build' # # DOCKER 
DISTRIBUTION LANGUAGE VERSION COVERAGE # DEFAULT SETTING: {Java Version}, EKS, AMD64, AL2 From a4d69a77c480b9fd0639973eca6146a606869331 Mon Sep 17 00:00:00 2001 From: Mahad Janjua <134644284+majanjua-amzn@users.noreply.github.com> Date: Mon, 22 Sep 2025 16:48:27 -0700 Subject: [PATCH 65/83] Merge 2.11.5 back into main branch (#1201) --- .github/actions/patch-dependencies/action.yml | 33 +- .../patches/opentelemetry-java-contrib.patch | 851 +++++++++++++----- .../opentelemetry-java-instrumentation.patch | 28 + .github/scripts/patch.sh | 13 + CHANGELOG.md | 4 +- dependencyManagement/build.gradle.kts | 2 +- lambda-layer/build-layer.sh | 51 +- .../aws-otel-java-instrumentation.patch | 2 +- .../opentelemetry-java-instrumentation.patch | 4 +- scripts/local_patch.sh | 24 + 10 files changed, 724 insertions(+), 288 deletions(-) create mode 100644 .github/patches/opentelemetry-java-instrumentation.patch diff --git a/.github/actions/patch-dependencies/action.yml b/.github/actions/patch-dependencies/action.yml index 9281534275..529d956221 100644 --- a/.github/actions/patch-dependencies/action.yml +++ b/.github/actions/patch-dependencies/action.yml @@ -49,6 +49,9 @@ runs: if [[ -f .github/patches/opentelemetry-java.patch ]]; then echo 'patch_otel_java=true' >> $GITHUB_ENV fi + if [[ -f .github/patches/opentelemetry-java-instrumentation.patch ]]; then + echo 'patch_otel_java_instrumentation=true' >> $GITHUB_ENV + fi if [[ -f .github/patches/opentelemetry-java-contrib.patch ]]; then echo 'patch_otel_java_contrib=true' >> $GITHUB_ENV fi @@ -57,21 +60,22 @@ runs: - name: Clone and patch repositories run: .github/scripts/patch.sh if: ${{ env.patch_otel_java == 'true' || - env.patch_otel_java_contrib == 'true' }} + env.patch_otel_java_contrib == 'true' || + env.patch_otel_java_instrumentation == 'true' }} shell: bash - name: Build opentelemetry-java with tests uses: gradle/gradle-build-action@v2 if: ${{ env.patch_otel_java == 'true' && inputs.run_tests != 'false' }} with: - 
arguments: build publishToMavenLocal + arguments: build publishToMavenLocal --scan --no-daemon build-root-directory: opentelemetry-java - name: Build opentelemetry-java uses: gradle/gradle-build-action@v2 if: ${{ env.patch_otel_java == 'true' && inputs.run_tests == 'false' }} with: - arguments: publishToMavenLocal + arguments: publishToMavenLocal --scan --no-daemon build-root-directory: opentelemetry-java - name: cleanup opentelemetry-java @@ -83,17 +87,36 @@ runs: uses: gradle/gradle-build-action@v2 if: ${{ env.patch_otel_java_contrib == 'true' && inputs.run_tests != 'false' }} with: - arguments: build publishToMavenLocal + arguments: build publishToMavenLocal --scan --no-daemon build-root-directory: opentelemetry-java-contrib - name: Build opentelemetry-java-contrib uses: gradle/gradle-build-action@v2 if: ${{ env.patch_otel_java_contrib == 'true' && inputs.run_tests == 'false' }} with: - arguments: publishToMavenLocal + arguments: publishToMavenLocal --scan --no-daemon build-root-directory: opentelemetry-java-contrib - name: cleanup opentelemetry-java-contrib run: rm -rf opentelemetry-java-contrib if: ${{ env.patch_otel_java_contrib == 'true' }} shell: bash + + - name: Build opentelemetry-java-instrumentation with tests + uses: gradle/gradle-build-action@v2 + if: ${{ env.patch_otel_java_instrumentation == 'true' && inputs.run_tests != 'false' }} + with: + arguments: check -x spotlessCheck publishToMavenLocal --scan --no-daemon + build-root-directory: opentelemetry-java-instrumentation + + - name: Build opentelemetry java instrumentation + uses: gradle/gradle-build-action@v2 + if: ${{ env.patch_otel_java_instrumentation == 'true' && inputs.run_tests == 'false' }} + with: + arguments: publishToMavenLocal --scan --no-daemon + build-root-directory: opentelemetry-java-instrumentation + + - name: cleanup opentelmetry-java-instrumentation + run: rm -rf opentelemetry-java-instrumentation + if: ${{ env.patch_otel_java_instrumentation == 'true' }} + shell: bash \ No 
newline at end of file diff --git a/.github/patches/opentelemetry-java-contrib.patch b/.github/patches/opentelemetry-java-contrib.patch index 718fa85de4..6db6a272db 100644 --- a/.github/patches/opentelemetry-java-contrib.patch +++ b/.github/patches/opentelemetry-java-contrib.patch @@ -30,32 +30,98 @@ index 4f7743a3..9e2082ed 100644 + }, ], } +diff --git a/aws-xray-propagator/src/main/java/io/opentelemetry/contrib/awsxray/propagator/AwsXrayPropagator.java b/aws-xray-propagator/src/main/java/io/opentelemetry/contrib/awsxray/propagator/AwsXrayPropagator.java +index 721d0144..dce2d2a3 100644 +--- a/aws-xray-propagator/src/main/java/io/opentelemetry/contrib/awsxray/propagator/AwsXrayPropagator.java ++++ b/aws-xray-propagator/src/main/java/io/opentelemetry/contrib/awsxray/propagator/AwsXrayPropagator.java +@@ -9,6 +9,7 @@ import static io.opentelemetry.api.internal.OtelEncodingUtils.isValidBase16Strin + + import io.opentelemetry.api.baggage.Baggage; + import io.opentelemetry.api.baggage.BaggageBuilder; ++import io.opentelemetry.api.baggage.propagation.W3CBaggagePropagator; + import io.opentelemetry.api.internal.StringUtils; + import io.opentelemetry.api.trace.Span; + import io.opentelemetry.api.trace.SpanContext; +@@ -80,6 +81,9 @@ public final class AwsXrayPropagator implements TextMapPropagator { + private static final String INVALID_LINEAGE = "-1:11111111:0"; + private static final int NUM_OF_LINEAGE_DELIMITERS = 2; + ++ // Copied from AwsSamplingResult in aws-xray extension ++ private static final String AWS_XRAY_SAMPLING_RULE_TRACE_STATE_KEY = "xrsr"; ++ + private static final List FIELDS = Collections.singletonList(TRACE_HEADER_KEY); + + private static final AwsXrayPropagator INSTANCE = new AwsXrayPropagator(); +@@ -140,6 +144,16 @@ public final class AwsXrayPropagator implements TextMapPropagator { + + Baggage baggage = Baggage.fromContext(context); + String lineageHeader = baggage.getEntryValue(LINEAGE_KEY); ++ // Get sampling rule from trace state and inject 
into baggage ++ // This is a back up in case the next service does not have trace state propagation ++ String ruleFromTraceState = ++ spanContext.getTraceState().get(AWS_XRAY_SAMPLING_RULE_TRACE_STATE_KEY); ++ if (ruleFromTraceState != null) { ++ baggage = ++ baggage.toBuilder() ++ .put(AWS_XRAY_SAMPLING_RULE_TRACE_STATE_KEY, ruleFromTraceState) ++ .build(); ++ } + + if (lineageHeader != null) { + traceHeader +@@ -152,6 +166,9 @@ public final class AwsXrayPropagator implements TextMapPropagator { + // add 256 character truncation + String truncatedTraceHeader = traceHeader.substring(0, Math.min(traceHeader.length(), 256)); + setter.set(carrier, TRACE_HEADER_KEY, truncatedTraceHeader); ++ ++ // Ensure baggage is propagated with any modifications ++ W3CBaggagePropagator.getInstance().inject(context.with(baggage), carrier, setter); + } + + @Override +@@ -245,12 +262,15 @@ public final class AwsXrayPropagator implements TextMapPropagator { + logger.finest("Both traceId and spanId are required to extract a valid span context. "); + } + ++ SpanContext upstreamSpanContext = Span.fromContext(context).getSpanContext(); + SpanContext spanContext = + SpanContext.createFromRemoteParent( + StringUtils.padLeft(traceId, TraceId.getLength()), + spanId, + isSampled ? TraceFlags.getSampled() : TraceFlags.getDefault(), +- TraceState.getDefault()); ++ upstreamSpanContext.isValid() ++ ? 
upstreamSpanContext.getTraceState() ++ : TraceState.getDefault()); + + if (spanContext.isValid()) { + context = context.with(Span.wrap(spanContext)); diff --git a/aws-xray/build.gradle.kts b/aws-xray/build.gradle.kts index 54dabba7..d56b12bd 100644 --- a/aws-xray/build.gradle.kts +++ b/aws-xray/build.gradle.kts @@ -11,6 +11,7 @@ dependencies { api("io.opentelemetry:opentelemetry-sdk-trace") - + compileOnly("io.opentelemetry:opentelemetry-sdk-extension-autoconfigure") + implementation("io.opentelemetry.semconv:opentelemetry-semconv:1.32.0-alpha") - + implementation("com.squareup.okhttp3:okhttp") implementation("io.opentelemetry.semconv:opentelemetry-semconv") @@ -25,6 +26,7 @@ dependencies { - + implementation("com.fasterxml.jackson.core:jackson-core") implementation("com.fasterxml.jackson.core:jackson-databind") + implementation("com.github.ben-manes.caffeine:caffeine:2.9.3") - + testImplementation("com.linecorp.armeria:armeria-junit5") testImplementation("io.opentelemetry:opentelemetry-sdk-extension-autoconfigure") diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsSamplingResult.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsSamplingResult.java new file mode 100644 -index 00000000..41f22f90 +index 00000000..4aed8959 --- /dev/null +++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsSamplingResult.java -@@ -0,0 +1,54 @@ +@@ -0,0 +1,56 @@ +/* + * Copyright The OpenTelemetry Authors + * SPDX-License-Identifier: Apache-2.0 @@ -67,6 +133,7 @@ index 00000000..41f22f90 +import io.opentelemetry.api.trace.TraceState; +import io.opentelemetry.sdk.trace.samplers.SamplingDecision; +import io.opentelemetry.sdk.trace.samplers.SamplingResult; ++import javax.annotation.Nullable; + +final class AwsSamplingResult implements SamplingResult { + @@ -76,17 +143,17 @@ index 00000000..41f22f90 + + private final SamplingDecision decision; + private final Attributes attributes; -+ private final String samplingRuleName; ++ @Nullable 
private final String samplingRuleName; + + private AwsSamplingResult( -+ SamplingDecision decision, Attributes attributes, String samplingRuleName) { ++ SamplingDecision decision, Attributes attributes, @Nullable String samplingRuleName) { + this.decision = decision; + this.attributes = attributes; + this.samplingRuleName = samplingRuleName; + } + + static AwsSamplingResult create( -+ SamplingDecision decision, Attributes attributes, String samplingRuleName) { ++ SamplingDecision decision, Attributes attributes, @Nullable String samplingRuleName) { + return new AwsSamplingResult(decision, attributes, samplingRuleName); + } + @@ -102,7 +169,8 @@ index 00000000..41f22f90 + + @Override + public TraceState getUpdatedTraceState(TraceState parentTraceState) { -+ if (parentTraceState.get(AWS_XRAY_SAMPLING_RULE_TRACE_STATE_KEY) == null) { ++ if (parentTraceState.get(AWS_XRAY_SAMPLING_RULE_TRACE_STATE_KEY) == null ++ && this.samplingRuleName != null) { + return parentTraceState.toBuilder() + .put(AWS_XRAY_SAMPLING_RULE_TRACE_STATE_KEY, samplingRuleName) + .build(); @@ -265,7 +333,7 @@ index 00000000..dc5b7a01 + } +} diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSampler.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSampler.java -index ad9b72a2..7864f358 100644 +index ad9b72a2..31d5a293 100644 --- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSampler.java +++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSampler.java @@ -9,16 +9,22 @@ import io.opentelemetry.api.common.Attributes; @@ -301,8 +369,11 @@ index ad9b72a2..7864f358 100644 private final Resource resource; private final Clock clock; private final Sampler initialSampler; -@@ -59,6 +68,9 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { - @Nullable private volatile XrayRulesSampler internalXrayRulesSampler; +@@ -56,9 +65,11 @@ public final class AwsXrayRemoteSampler implements 
Sampler, Closeable { + @Nullable private volatile ScheduledFuture pollFuture; + @Nullable private volatile ScheduledFuture fetchTargetsFuture; + @Nullable private volatile GetSamplingRulesResponse previousRulesResponse; +- @Nullable private volatile XrayRulesSampler internalXrayRulesSampler; private volatile Sampler sampler; + @Nullable private AwsXrayAdaptiveSamplingConfig adaptiveSamplingConfig; @@ -311,7 +382,7 @@ index ad9b72a2..7864f358 100644 /** * Returns a {@link AwsXrayRemoteSamplerBuilder} with the given {@link Resource}. This {@link * Resource} should be the same as what the OpenTelemetry SDK is configured with. -@@ -120,6 +132,40 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { +@@ -120,13 +131,47 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { return "AwsXrayRemoteSampler{" + sampler.getDescription() + "}"; } @@ -321,8 +392,8 @@ index ad9b72a2..7864f358 100644 + } else if (config != null && this.adaptiveSamplingConfig == null) { + // Save here and also pass to XrayRulesSampler directly as it already exists + this.adaptiveSamplingConfig = config; -+ if (internalXrayRulesSampler != null) { -+ internalXrayRulesSampler.setAdaptiveSamplingConfig(config); ++ if (sampler instanceof XrayRulesSampler) { ++ ((XrayRulesSampler) sampler).setAdaptiveSamplingConfig(config); + } + } + } @@ -344,27 +415,44 @@ index ad9b72a2..7864f358 100644 + throw new IllegalStateException( + "Programming bug - BatchSpanProcessor is null while trying to adapt sampling"); + } -+ if (internalXrayRulesSampler != null) { -+ internalXrayRulesSampler.adaptSampling(span, spanData, this.bsp::onEnd); ++ if (sampler instanceof XrayRulesSampler) { ++ ((XrayRulesSampler) sampler).adaptSampling(span, spanData, this.bsp::onEnd); + } + } + private void getAndUpdateSampler() { try { // No pagination support yet, or possibly ever. 
-@@ -134,8 +180,8 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { + GetSamplingRulesResponse response = + client.getSamplingRules(GetSamplingRulesRequest.create(null)); + if (!response.equals(previousRulesResponse)) { +- updateInternalSamplers( ++ sampler = + new XrayRulesSampler( + clientId, + resource, +@@ -134,8 +179,8 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { initialSampler, response.getSamplingRules().stream() .map(SamplingRuleRecord::getRule) - .collect(Collectors.toList()))); - + .collect(Collectors.toList()), -+ adaptiveSamplingConfig)); ++ adaptiveSamplingConfig); previousRulesResponse = response; ScheduledFuture existingFetchTargetsFuture = fetchTargetsFuture; if (existingFetchTargetsFuture != null) { -@@ -179,14 +225,29 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { - XrayRulesSampler xrayRulesSampler = this.internalXrayRulesSampler; +@@ -172,25 +217,41 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { + } + + private void fetchTargets() { +- if (this.internalXrayRulesSampler == null) { ++ if (!(sampler instanceof XrayRulesSampler)) { + throw new IllegalStateException("Programming bug."); + } + +- XrayRulesSampler xrayRulesSampler = this.internalXrayRulesSampler; ++ XrayRulesSampler xrayRulesSampler = (XrayRulesSampler) sampler; try { Date now = Date.from(Instant.ofEpochSecond(0, clock.now())); - List statistics = xrayRulesSampler.snapshot(now); @@ -396,6 +484,24 @@ index ad9b72a2..7864f358 100644 Map targets = response.getDocuments().stream() .collect(Collectors.toMap(SamplingTargetDocument::getRuleName, Function.identity())); +- updateInternalSamplers(xrayRulesSampler.withTargets(targets, requestedTargetRuleNames, now)); ++ sampler = ++ xrayRulesSampler = xrayRulesSampler.withTargets(targets, requestedTargetRuleNames, now); + } catch (Throwable t) { + // Might be a transient API failure, try again after a default interval. 
+ fetchTargetsFuture = +@@ -226,11 +287,6 @@ public final class AwsXrayRemoteSampler implements Sampler, Closeable { + return new String(clientIdChars); + } + +- private void updateInternalSamplers(XrayRulesSampler xrayRulesSampler) { +- this.internalXrayRulesSampler = xrayRulesSampler; +- this.sampler = Sampler.parentBased(internalXrayRulesSampler); +- } +- + // Visible for testing + XraySamplerClient getClient() { + return client; diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/GetSamplingRulesResponse.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/GetSamplingRulesResponse.java index dca930d5..01835dc2 100644 --- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/GetSamplingRulesResponse.java @@ -590,11 +696,14 @@ index c1e178f5..406f07e2 100644 + } } diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplier.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplier.java -index 1d97c4ae..6462c7f3 100644 +index 1d97c4ae..dd369f5f 100644 --- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplier.java +++ b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplier.java -@@ -11,10 +11,13 @@ import io.opentelemetry.api.common.AttributeKey; +@@ -9,12 +9,16 @@ import static io.opentelemetry.semconv.ServiceAttributes.SERVICE_NAME; + + import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; ++import io.opentelemetry.api.trace.Span; import io.opentelemetry.api.trace.SpanKind; import io.opentelemetry.context.Context; +import io.opentelemetry.contrib.awsxray.GetSamplingTargetsRequest.SamplingBoostStatisticsDocument; @@ -607,7 +716,7 @@ index 1d97c4ae..6462c7f3 100644 import io.opentelemetry.sdk.trace.data.LinkData; import io.opentelemetry.sdk.trace.samplers.Sampler; import io.opentelemetry.sdk.trace.samplers.SamplingDecision; -@@ -76,12 +79,20 @@ final class SamplingRuleApplier { +@@ -76,12 +80,20 
@@ final class SamplingRuleApplier { private final String clientId; private final String ruleName; @@ -628,7 +737,7 @@ index 1d97c4ae..6462c7f3 100644 private final Map attributeMatchers; private final Matcher urlPathMatcher; private final Matcher serviceNameMatcher; -@@ -94,7 +105,11 @@ final class SamplingRuleApplier { +@@ -94,7 +106,11 @@ final class SamplingRuleApplier { private final long nextSnapshotTimeNanos; @@ -641,7 +750,7 @@ index 1d97c4ae..6462c7f3 100644 this.clientId = clientId; this.clock = clock; String ruleName = rule.getRuleName(); -@@ -108,6 +123,8 @@ final class SamplingRuleApplier { +@@ -108,6 +124,8 @@ final class SamplingRuleApplier { } this.ruleName = ruleName; @@ -650,7 +759,7 @@ index 1d97c4ae..6462c7f3 100644 // We don't have a SamplingTarget so are ready to report a snapshot right away. nextSnapshotTimeNanos = clock.nanoTime(); -@@ -124,7 +141,15 @@ final class SamplingRuleApplier { +@@ -124,7 +142,15 @@ final class SamplingRuleApplier { reservoirSampler = Sampler.alwaysOff(); borrowing = false; } @@ -667,7 +776,7 @@ index 1d97c4ae..6462c7f3 100644 if (rule.getAttributes().isEmpty()) { attributeMatchers = Collections.emptyMap(); -@@ -147,11 +172,16 @@ final class SamplingRuleApplier { +@@ -147,11 +173,16 @@ final class SamplingRuleApplier { private SamplingRuleApplier( String clientId, String ruleName, @@ -684,7 +793,7 @@ index 1d97c4ae..6462c7f3 100644 Map attributeMatchers, Matcher urlPathMatcher, Matcher serviceNameMatcher, -@@ -163,11 +193,16 @@ final class SamplingRuleApplier { +@@ -163,11 +194,16 @@ final class SamplingRuleApplier { long nextSnapshotTimeNanos) { this.clientId = clientId; this.ruleName = ruleName; @@ -701,7 +810,7 @@ index 1d97c4ae..6462c7f3 100644 this.attributeMatchers = attributeMatchers; this.urlPathMatcher = urlPathMatcher; this.serviceNameMatcher = serviceNameMatcher; -@@ -177,6 +212,7 @@ final class SamplingRuleApplier { +@@ -177,6 +213,7 @@ final class SamplingRuleApplier { this.resourceArnMatcher = 
resourceArnMatcher; this.statistics = statistics; this.nextSnapshotTimeNanos = nextSnapshotTimeNanos; @@ -709,13 +818,40 @@ index 1d97c4ae..6462c7f3 100644 } @SuppressWarnings("deprecation") // TODO -@@ -273,45 +309,84 @@ final class SamplingRuleApplier { - statistics.sampled.increment(); +@@ -257,8 +294,13 @@ final class SamplingRuleApplier { + SpanKind spanKind, + Attributes attributes, + List parentLinks) { ++ // Only emit statistics for spans for which a sampling decision is being made actively ++ // i.e. The root span in a call chain ++ boolean shouldCount = !Span.fromContext(parentContext).getSpanContext().isValid(); + // Incrementing requests first ensures sample / borrow rate are positive. +- statistics.requests.increment(); ++ if (shouldCount) { ++ statistics.requests.increment(); ++ } + boolean reservoirExpired = clock.nanoTime() >= reservoirEndTimeNanos; + SamplingResult result = + !reservoirExpired +@@ -267,51 +309,92 @@ final class SamplingRuleApplier { + : SamplingResult.create(SamplingDecision.DROP); + if (result.getDecision() != SamplingDecision.DROP) { + // We use the result from the reservoir sampler if it worked. 
+- if (borrowing) { +- statistics.borrowed.increment(); ++ if (shouldCount) { ++ if (borrowing) { ++ statistics.borrowed.increment(); ++ } ++ statistics.sampled.increment(); + } +- statistics.sampled.increment(); return result; } - result = - fixedRateSampler.shouldSample( - parentContext, traceId, name, spanKind, attributes, parentLinks); +- if (result.getDecision() != SamplingDecision.DROP) { + + if (clock.nanoTime() < boostEndTimeNanos) { + result = @@ -726,7 +862,7 @@ index 1d97c4ae..6462c7f3 100644 + fixedRateSampler.shouldSample( + parentContext, traceId, name, spanKind, attributes, parentLinks); + } - if (result.getDecision() != SamplingDecision.DROP) { ++ if (shouldCount && result.getDecision() != SamplingDecision.DROP) { statistics.sampled.increment(); } return result; @@ -811,7 +947,7 @@ index 1d97c4ae..6462c7f3 100644 + Duration.between(now.toInstant(), target.getReservoirQuotaTtl().toInstant()) .toNanos(); } -@@ -319,16 +394,36 @@ final class SamplingRuleApplier { +@@ -319,16 +402,36 @@ final class SamplingRuleApplier { target.getIntervalSecs() != null ? 
TimeUnit.SECONDS.toNanos(target.getIntervalSecs()) : AwsXrayRemoteSampler.DEFAULT_TARGET_INTERVAL_NANOS; @@ -849,7 +985,7 @@ index 1d97c4ae..6462c7f3 100644 attributeMatchers, urlPathMatcher, serviceNameMatcher, -@@ -344,11 +439,16 @@ final class SamplingRuleApplier { +@@ -344,11 +447,16 @@ final class SamplingRuleApplier { return new SamplingRuleApplier( clientId, ruleName, @@ -866,7 +1002,7 @@ index 1d97c4ae..6462c7f3 100644 attributeMatchers, urlPathMatcher, serviceNameMatcher, -@@ -364,6 +464,15 @@ final class SamplingRuleApplier { +@@ -364,6 +472,15 @@ final class SamplingRuleApplier { return ruleName; } @@ -882,7 +1018,21 @@ index 1d97c4ae..6462c7f3 100644 @Nullable private static String getArn(Attributes attributes, Resource resource) { String arn = resource.getAttributes().get(AWS_ECS_CONTAINER_ARN); -@@ -515,5 +624,30 @@ final class SamplingRuleApplier { +@@ -500,11 +617,11 @@ final class SamplingRuleApplier { + } + + private Sampler createRateLimited(int numPerSecond) { +- return new RateLimitingSampler(numPerSecond, clock); ++ return Sampler.parentBased(new RateLimitingSampler(numPerSecond, clock)); + } + + private static Sampler createFixedRate(double rate) { +- return Sampler.traceIdRatioBased(rate); ++ return Sampler.parentBased(Sampler.traceIdRatioBased(rate)); + } + + // We keep track of sampling requests and decisions to report to X-Ray to allow it to allocate +@@ -515,5 +632,30 @@ final class SamplingRuleApplier { final LongAdder requests = new LongAdder(); final LongAdder sampled = new LongAdder(); final LongAdder borrowed = new LongAdder(); @@ -914,10 +1064,10 @@ index 1d97c4ae..6462c7f3 100644 } } diff --git a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/XrayRulesSampler.java b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/XrayRulesSampler.java -index 75977dc0..9620ba2b 100644 +index 75977dc0..48bdeb0f 100644 --- a/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/XrayRulesSampler.java +++ 
b/aws-xray/src/main/java/io/opentelemetry/contrib/awsxray/XrayRulesSampler.java -@@ -5,42 +5,79 @@ +@@ -5,42 +5,81 @@ package io.opentelemetry.contrib.awsxray; @@ -926,9 +1076,11 @@ index 75977dc0..9620ba2b 100644 + +import com.github.benmanes.caffeine.cache.Cache; +import com.github.benmanes.caffeine.cache.Caffeine; ++import io.opentelemetry.api.baggage.Baggage; +import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.Span; ++import io.opentelemetry.api.trace.SpanContext; import io.opentelemetry.api.trace.SpanKind; +import io.opentelemetry.api.trace.StatusCode; import io.opentelemetry.context.Context; @@ -998,7 +1150,7 @@ index 75977dc0..9620ba2b 100644 this( clientId, resource, -@@ -49,8 +86,19 @@ final class XrayRulesSampler implements Sampler { +@@ -49,8 +88,19 @@ final class XrayRulesSampler implements Sampler { rules.stream() // Lower priority value takes precedence so normal ascending sort. .sorted(Comparator.comparingInt(GetSamplingRulesResponse.SamplingRule::getPriority)) @@ -1020,7 +1172,7 @@ index 75977dc0..9620ba2b 100644 } private XrayRulesSampler( -@@ -58,12 +106,36 @@ final class XrayRulesSampler implements Sampler { +@@ -58,12 +108,36 @@ final class XrayRulesSampler implements Sampler { Resource resource, Clock clock, Sampler fallbackSampler, @@ -1058,15 +1210,22 @@ index 75977dc0..9620ba2b 100644 } @Override -@@ -74,10 +146,36 @@ final class XrayRulesSampler implements Sampler { +@@ -74,10 +148,43 @@ final class XrayRulesSampler implements Sampler { SpanKind spanKind, Attributes attributes, List parentLinks) { ++ SpanContext parentSpanContext = Span.fromContext(parentContext).getSpanContext(); + String upstreamMatchedRule = -+ Span.fromContext(parentContext) -+ .getSpanContext() ++ parentSpanContext + .getTraceState() + .get(AwsSamplingResult.AWS_XRAY_SAMPLING_RULE_TRACE_STATE_KEY); ++ if (upstreamMatchedRule == null) { ++ Baggage b = Baggage.fromContext(parentContext); ++ 
upstreamMatchedRule = ++ b != null ++ ? b.getEntryValue(AwsSamplingResult.AWS_XRAY_SAMPLING_RULE_TRACE_STATE_KEY) ++ : null; ++ } for (SamplingRuleApplier applier : ruleAppliers) { if (applier.matches(attributes, resource)) { - return applier.shouldSample( @@ -1078,26 +1237,26 @@ index 75977dc0..9620ba2b 100644 + // Otherwise, encode and propagate the matched sampling rule using AwsSamplingResult + String ruleToPropagate; + if (upstreamMatchedRule != null) { -+ ruleToPropagate = hashToRuleMap.getOrDefault(upstreamMatchedRule, applier.getRuleName()); ++ ruleToPropagate = hashToRuleMap.getOrDefault(upstreamMatchedRule, null); ++ } else if (parentSpanContext.isValid()) { ++ ruleToPropagate = null; + } else { + ruleToPropagate = applier.getRuleName(); + } -+ String hashedRule = ruleToHashMap.getOrDefault(ruleToPropagate, ruleToPropagate); -+ if (this.adaptiveSamplingConfig != null -+ && this.adaptiveSamplingConfig.getAnomalyCaptureLimit() != null) { -+ // If the span is capturable based on local SDK config, add sampling rule attribute -+ return AwsSamplingResult.create( -+ result.getDecision(), -+ result.getAttributes().toBuilder() -+ .put(AWS_XRAY_SAMPLING_RULE.getKey(), ruleToPropagate) -+ .build(), -+ hashedRule); -+ } -+ return AwsSamplingResult.create(result.getDecision(), result.getAttributes(), hashedRule); ++ String hashedRule = ruleToHashMap.getOrDefault(ruleToPropagate, upstreamMatchedRule); ++ ++ return AwsSamplingResult.create( ++ result.getDecision(), ++ result.getAttributes().toBuilder() ++ .put( ++ AWS_XRAY_SAMPLING_RULE.getKey(), ++ ruleToPropagate != null ? 
ruleToPropagate : "UNKNOWN") ++ .build(), ++ hashedRule); } } -@@ -96,7 +194,184 @@ final class XrayRulesSampler implements Sampler { +@@ -96,7 +203,97 @@ final class XrayRulesSampler implements Sampler { return "XrayRulesSampler{" + Arrays.toString(ruleAppliers) + "}"; } @@ -1113,6 +1272,8 @@ index 75977dc0..9620ba2b 100644 + int anomalyTracesPerSecond = config.getAnomalyCaptureLimit().getAnomalyTracesPerSecond(); + this.anomalyCaptureRateLimiter = + new RateLimiter(anomalyTracesPerSecond, anomalyTracesPerSecond, clock); ++ } else { ++ this.anomalyCaptureRateLimiter = new RateLimiter(1, 1, clock); + } + } + } @@ -1121,81 +1282,10 @@ index 75977dc0..9620ba2b 100644 + if (!adaptiveSamplingRuleExists && this.adaptiveSamplingConfig == null) { + return; + } -+ Long statusCode = spanData.getAttributes().get(HTTP_RESPONSE_STATUS_CODE); -+ -+ boolean shouldBoostSampling = false; -+ boolean shouldCaptureAnomalySpan = false; + -+ List anomalyConditions = -+ adaptiveSamplingConfig != null ? adaptiveSamplingConfig.getAnomalyConditions() : null; -+ // Empty list -> no conditions will apply and we will not do anything -+ if (anomalyConditions != null && !anomalyConditions.isEmpty()) { -+ String operation = spanData.getAttributes().get(AwsAttributeKeys.AWS_LOCAL_OPERATION); -+ if (operation == null) { -+ operation = generateIngressOperation(spanData); -+ } -+ for (AwsXrayAdaptiveSamplingConfig.AnomalyConditions condition : anomalyConditions) { -+ // Skip condition if it would only re-apply action already being taken -+ if ((shouldBoostSampling -+ && AwsXrayAdaptiveSamplingConfig.UsageType.SAMPLING_BOOST.equals( -+ condition.getUsage())) -+ || (shouldCaptureAnomalySpan -+ && AwsXrayAdaptiveSamplingConfig.UsageType.ANOMALY_TRACE_CAPTURE.equals( -+ condition.getUsage()))) { -+ continue; -+ } -+ // Check if the operation matches any in the list or if operations list is null (match all) -+ List operations = condition.getOperations(); -+ if (!(operations == null || 
operations.isEmpty() || operations.contains(operation))) { -+ continue; -+ } -+ // Check if any anomalyConditions detect an anomaly either through error code or latency -+ boolean isAnomaly = false; -+ -+ String errorCodeRegex = condition.getErrorCodeRegex(); -+ if (statusCode != null && errorCodeRegex != null) { -+ isAnomaly = statusCode.toString().matches(errorCodeRegex); -+ } -+ -+ Long highLatencyMs = condition.getHighLatencyMs(); -+ if (highLatencyMs != null) { -+ isAnomaly = -+ (errorCodeRegex == null || isAnomaly) -+ && (span.getLatencyNanos() / 1_000_000.0) >= highLatencyMs; -+ } -+ -+ if (isAnomaly) { -+ AwsXrayAdaptiveSamplingConfig.UsageType usage = condition.getUsage(); -+ if (usage != null) { -+ switch (usage) { -+ case BOTH: -+ shouldBoostSampling = true; -+ shouldCaptureAnomalySpan = true; -+ break; -+ case SAMPLING_BOOST: -+ shouldBoostSampling = true; -+ break; -+ case ANOMALY_TRACE_CAPTURE: -+ shouldCaptureAnomalySpan = true; -+ break; -+ default: // do nothing -+ } -+ } else { -+ shouldBoostSampling = true; -+ shouldCaptureAnomalySpan = true; -+ } -+ } -+ if (shouldBoostSampling && shouldCaptureAnomalySpan) { -+ break; -+ } -+ } -+ } else if ((statusCode != null && statusCode > 499) -+ || (statusCode == null -+ && spanData.getStatus() != null -+ && StatusCode.ERROR.equals(spanData.getStatus().getStatusCode()))) { -+ shouldBoostSampling = true; -+ shouldCaptureAnomalySpan = true; -+ } ++ AnomalyDetectionResult result = isAnomaly(span, spanData); ++ boolean shouldBoostSampling = result.shouldBoostSampling(); ++ boolean shouldCaptureAnomalySpan = result.shouldCaptureAnomalySpan(); + + String traceId = spanData.getTraceId(); + AwsXrayAdaptiveSamplingConfig.UsageType existingUsage = traceUsageCache.getIfPresent(traceId); @@ -1219,7 +1309,7 @@ index 75977dc0..9620ba2b 100644 + span.getSpanContext() + .getTraceState() + .get(AwsSamplingResult.AWS_XRAY_SAMPLING_RULE_TRACE_STATE_KEY); -+ String ruleNameForBoostStats = ++ String upstreamRuleName = + 
traceStateValue != null + ? hashToRuleMap.getOrDefault(traceStateValue, traceStateValue) + : traceStateValue; @@ -1227,7 +1317,7 @@ index 75977dc0..9620ba2b 100644 + SamplingRuleApplier matchedRule = null; + for (SamplingRuleApplier applier : ruleAppliers) { + // Rule propagated from when sampling decision was made, otherwise the matched rule -+ if (applier.getRuleName().equals(ruleNameForBoostStats)) { ++ if (applier.getRuleName().equals(upstreamRuleName)) { + ruleToReportTo = applier; + break; + } @@ -1240,10 +1330,12 @@ index 75977dc0..9620ba2b 100644 + logger.log( + Level.FINE, + "No sampling rule matched the request. This is a bug in either the OpenTelemetry SDK or X-Ray."); -+ } else { ++ } else if (!span.getParentSpanContext().isValid()) { ++ // Span is not from an upstream service, so we should boost the matched rule + ruleToReportTo = matchedRule; + } + } ++ + if (shouldBoostSampling + && ruleToReportTo != null + && ruleToReportTo.hasBoost() @@ -1256,34 +1348,14 @@ index 75977dc0..9620ba2b 100644 + } + } + -+ // Any interaction with a cache entry will reset the expiration timer of that entry -+ if (isSpanCaptured && isCountedAsAnomalyForBoost) { -+ this.traceUsageCache.put(traceId, AwsXrayAdaptiveSamplingConfig.UsageType.BOTH); -+ } else if (isSpanCaptured) { -+ if (AwsXrayAdaptiveSamplingConfig.UsageType.isUsedForBoost(existingUsage)) { -+ this.traceUsageCache.put(traceId, AwsXrayAdaptiveSamplingConfig.UsageType.BOTH); -+ } else { -+ this.traceUsageCache.put( -+ traceId, AwsXrayAdaptiveSamplingConfig.UsageType.ANOMALY_TRACE_CAPTURE); -+ } -+ } else if (isCountedAsAnomalyForBoost) { -+ if (AwsXrayAdaptiveSamplingConfig.UsageType.isUsedForAnomalyTraceCapture(existingUsage)) { -+ this.traceUsageCache.put(traceId, AwsXrayAdaptiveSamplingConfig.UsageType.BOTH); -+ } else { -+ this.traceUsageCache.put(traceId, AwsXrayAdaptiveSamplingConfig.UsageType.SAMPLING_BOOST); -+ } -+ } else if (existingUsage != null) { -+ this.traceUsageCache.put(traceId, existingUsage); 
-+ } else { -+ this.traceUsageCache.put(traceId, AwsXrayAdaptiveSamplingConfig.UsageType.NEITHER); -+ } ++ updateTraceUsageCache(traceId, isSpanCaptured, isCountedAsAnomalyForBoost); + } + + List snapshot(Date now) { return Arrays.stream(ruleAppliers) .map(rule -> rule.snapshot(now)) .filter(Objects::nonNull) -@@ -115,15 +390,16 @@ final class XrayRulesSampler implements Sampler { +@@ -115,15 +312,16 @@ final class XrayRulesSampler implements Sampler { Map ruleTargets, Set requestedTargetRuleNames, Date now) { @@ -1302,7 +1374,7 @@ index 75977dc0..9620ba2b 100644 } if (requestedTargetRuleNames.contains(rule.getRuleName())) { // In practice X-Ray should return a target for any rule we requested but -@@ -135,6 +411,92 @@ final class XrayRulesSampler implements Sampler { +@@ -135,6 +333,216 @@ final class XrayRulesSampler implements Sampler { return rule; }) .toArray(SamplingRuleApplier[]::new); @@ -1319,6 +1391,85 @@ index 75977dc0..9620ba2b 100644 + traceUsageCache); + } + ++ private AnomalyDetectionResult isAnomaly(ReadableSpan span, SpanData spanData) { ++ boolean shouldBoostSampling = false; ++ boolean shouldCaptureAnomalySpan = false; ++ Long statusCode = spanData.getAttributes().get(HTTP_RESPONSE_STATUS_CODE); ++ ++ List anomalyConditions = ++ adaptiveSamplingConfig != null ? 
adaptiveSamplingConfig.getAnomalyConditions() : null; ++ // Empty list -> no conditions will apply and we will not do anything ++ if (anomalyConditions != null) { ++ String operation = spanData.getAttributes().get(AwsAttributeKeys.AWS_LOCAL_OPERATION); ++ if (operation == null) { ++ operation = generateIngressOperation(spanData); ++ } ++ for (AwsXrayAdaptiveSamplingConfig.AnomalyConditions condition : anomalyConditions) { ++ // Skip condition if it would only re-apply action already being taken ++ if ((shouldBoostSampling ++ && AwsXrayAdaptiveSamplingConfig.UsageType.SAMPLING_BOOST.equals( ++ condition.getUsage())) ++ || (shouldCaptureAnomalySpan ++ && AwsXrayAdaptiveSamplingConfig.UsageType.ANOMALY_TRACE_CAPTURE.equals( ++ condition.getUsage()))) { ++ continue; ++ } ++ // Check if the operation matches any in the list or if operations list is null (match all) ++ List operations = condition.getOperations(); ++ if (!(operations == null || operations.isEmpty() || operations.contains(operation))) { ++ continue; ++ } ++ // Check if any anomalyConditions detect an anomaly either through error code or latency ++ boolean isAnomaly = false; ++ ++ String errorCodeRegex = condition.getErrorCodeRegex(); ++ if (statusCode != null && errorCodeRegex != null) { ++ isAnomaly = statusCode.toString().matches(errorCodeRegex); ++ } ++ ++ Long highLatencyMs = condition.getHighLatencyMs(); ++ if (highLatencyMs != null) { ++ isAnomaly = ++ (errorCodeRegex == null || isAnomaly) ++ && (span.getLatencyNanos() / 1_000_000.0) >= highLatencyMs; ++ } ++ ++ if (isAnomaly) { ++ AwsXrayAdaptiveSamplingConfig.UsageType usage = condition.getUsage(); ++ if (usage != null) { ++ switch (usage) { ++ case BOTH: ++ shouldBoostSampling = true; ++ shouldCaptureAnomalySpan = true; ++ break; ++ case SAMPLING_BOOST: ++ shouldBoostSampling = true; ++ break; ++ case ANOMALY_TRACE_CAPTURE: ++ shouldCaptureAnomalySpan = true; ++ break; ++ default: // do nothing ++ } ++ } else { ++ shouldBoostSampling = true; ++ 
shouldCaptureAnomalySpan = true; ++ } ++ } ++ if (shouldBoostSampling && shouldCaptureAnomalySpan) { ++ break; ++ } ++ } ++ } else if ((statusCode != null && statusCode > 499) ++ || (statusCode == null ++ && spanData.getStatus() != null ++ && StatusCode.ERROR.equals(spanData.getStatus().getStatusCode()))) { ++ shouldBoostSampling = true; ++ shouldCaptureAnomalySpan = true; ++ } ++ ++ return new AnomalyDetectionResult(shouldBoostSampling, shouldCaptureAnomalySpan); ++ } ++ + static boolean isKeyPresent(SpanData span, AttributeKey key) { + return span.getAttributes().get(key) != null; + } @@ -1360,6 +1511,33 @@ index 75977dc0..9620ba2b 100644 + return "/"; + } + ++ private void updateTraceUsageCache( ++ String traceId, boolean isSpanCaptured, boolean isCountedAsAnomalyForBoost) { ++ AwsXrayAdaptiveSamplingConfig.UsageType existingUsage = traceUsageCache.getIfPresent(traceId); ++ ++ // Any interaction with a cache entry will reset the expiration timer of that entry ++ if (isSpanCaptured && isCountedAsAnomalyForBoost) { ++ this.traceUsageCache.put(traceId, AwsXrayAdaptiveSamplingConfig.UsageType.BOTH); ++ } else if (isSpanCaptured) { ++ if (AwsXrayAdaptiveSamplingConfig.UsageType.isUsedForBoost(existingUsage)) { ++ this.traceUsageCache.put(traceId, AwsXrayAdaptiveSamplingConfig.UsageType.BOTH); ++ } else { ++ this.traceUsageCache.put( ++ traceId, AwsXrayAdaptiveSamplingConfig.UsageType.ANOMALY_TRACE_CAPTURE); ++ } ++ } else if (isCountedAsAnomalyForBoost) { ++ if (AwsXrayAdaptiveSamplingConfig.UsageType.isUsedForAnomalyTraceCapture(existingUsage)) { ++ this.traceUsageCache.put(traceId, AwsXrayAdaptiveSamplingConfig.UsageType.BOTH); ++ } else { ++ this.traceUsageCache.put(traceId, AwsXrayAdaptiveSamplingConfig.UsageType.SAMPLING_BOOST); ++ } ++ } else if (existingUsage != null) { ++ this.traceUsageCache.put(traceId, existingUsage); ++ } else { ++ this.traceUsageCache.put(traceId, AwsXrayAdaptiveSamplingConfig.UsageType.NEITHER); ++ } ++ } ++ + private static Map 
createRuleHashMaps( + List rules) { + Map ruleToHashMap = new HashMap<>(); @@ -1394,21 +1572,39 @@ index 75977dc0..9620ba2b 100644 + Cache getTraceUsageCache() { + traceUsageCache.cleanUp(); + return traceUsageCache; ++ } ++ ++ private static class AnomalyDetectionResult { ++ private final boolean shouldBoostSampling; ++ private final boolean shouldCaptureAnomalySpan; ++ ++ public AnomalyDetectionResult(boolean shouldBoostSampling, boolean shouldCaptureAnomalySpan) { ++ this.shouldBoostSampling = shouldBoostSampling; ++ this.shouldCaptureAnomalySpan = shouldCaptureAnomalySpan; ++ } ++ ++ boolean shouldBoostSampling() { ++ return shouldBoostSampling; ++ } ++ ++ boolean shouldCaptureAnomalySpan() { ++ return shouldCaptureAnomalySpan; ++ } } } diff --git a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSamplerTest.java b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSamplerTest.java -index 4e5cd13b..ec256fe0 100644 +index 4e5cd13b..5af11a25 100644 --- a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSamplerTest.java +++ b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/AwsXrayRemoteSamplerTest.java @@ -7,7 +7,10 @@ package io.opentelemetry.contrib.awsxray; - + import static java.util.Objects.requireNonNull; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.AssertionsForClassTypes.assertThatCode; import static org.awaitility.Awaitility.await; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; - + import com.google.common.io.ByteStreams; import com.linecorp.armeria.common.HttpResponse; @@ -21,6 +24,9 @@ import io.opentelemetry.api.trace.SpanKind; @@ -1421,10 +1617,25 @@ index 4e5cd13b..ec256fe0 100644 import io.opentelemetry.sdk.trace.samplers.Sampler; import io.opentelemetry.sdk.trace.samplers.SamplingDecision; import java.io.IOException; -@@ -187,6 +193,31 @@ class AwsXrayRemoteSamplerTest { - } +@@ 
-169,21 +175,28 @@ class AwsXrayRemoteSamplerTest { } - + + @Test +- void parentBasedXraySamplerAfterDefaultSampler() { +- rulesResponse.set(RULE_RESPONSE_1); +- try (AwsXrayRemoteSampler samplerWithLongerPollingInterval = +- AwsXrayRemoteSampler.newBuilder(Resource.empty()) +- .setInitialSampler(Sampler.alwaysOn()) +- .setEndpoint(server.httpUri().toString()) +- .setPollingInterval(Duration.ofMillis(5)) +- .build()) { +- await() +- .pollDelay(Duration.ofMillis(10)) +- .untilAsserted( +- () -> { +- assertThat(sampler.getDescription()) +- .startsWith("AwsXrayRemoteSampler{ParentBased{root:XrayRulesSampler{["); +- }); + void setAndResetSpanExporter() { + try (AwsXrayRemoteSampler sampler = AwsXrayRemoteSampler.newBuilder(Resource.empty()).build()) { + // Setting span exporter should only work once @@ -1447,16 +1658,13 @@ index 4e5cd13b..ec256fe0 100644 + sampler.setSpanExporter(mock(SpanExporter.class)); + assertThatCode(() -> sampler.adaptSampling(mock(ReadableSpan.class), mock(SpanData.class))) + .doesNotThrowAnyException(); -+ } -+ } -+ - // https://github.com/open-telemetry/opentelemetry-java-contrib/issues/376 - @Test - void testJitterTruncation() { -@@ -206,6 +237,16 @@ class AwsXrayRemoteSamplerTest { } } - + +@@ -206,6 +219,16 @@ class AwsXrayRemoteSamplerTest { + } + } + + @Test + void setAdaptiveSamplingConfig() { + try (AwsXrayRemoteSampler sampler = AwsXrayRemoteSampler.newBuilder(Resource.empty()).build()) { @@ -1471,21 +1679,23 @@ index 4e5cd13b..ec256fe0 100644 return sampler .shouldSample( diff --git a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplierTest.java b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplierTest.java -index 920a5ffd..dcc7118a 100644 +index 920a5ffd..b7c21aa0 100644 --- a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplierTest.java +++ b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/SamplingRuleApplierTest.java -@@ -15,18 +15,25 @@ import static 
io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_ +@@ -15,18 +15,27 @@ import static io.opentelemetry.semconv.incubating.HttpIncubatingAttributes.HTTP_ import static io.opentelemetry.semconv.incubating.NetIncubatingAttributes.NET_HOST_NAME; import static org.assertj.core.api.Assertions.assertThat; import static org.awaitility.Awaitility.await; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; - + import com.fasterxml.jackson.databind.ObjectMapper; import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.AttributesBuilder; ++import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanContext; ++import io.opentelemetry.api.trace.SpanId; import io.opentelemetry.api.trace.SpanKind; +import io.opentelemetry.api.trace.TraceFlags; import io.opentelemetry.api.trace.TraceId; @@ -1500,7 +1710,7 @@ index 920a5ffd..dcc7118a 100644 import io.opentelemetry.sdk.trace.samplers.SamplingDecision; import io.opentelemetry.sdk.trace.samplers.SamplingResult; import io.opentelemetry.semconv.HttpAttributes; -@@ -37,6 +44,7 @@ import java.io.IOException; +@@ -37,6 +46,7 @@ import java.io.IOException; import java.io.UncheckedIOException; import java.time.Duration; import java.time.Instant; @@ -1508,16 +1718,16 @@ index 920a5ffd..dcc7118a 100644 import java.util.Collections; import java.util.Date; import java.util.concurrent.TimeUnit; -@@ -50,6 +58,7 @@ class SamplingRuleApplierTest { +@@ -50,6 +60,7 @@ class SamplingRuleApplierTest { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - + private static final String CLIENT_ID = "test-client-id"; + private static final String TEST_SERVICE_NAME = "test-service-name"; - + @Nested @SuppressWarnings("ClassCanBeStatic") -@@ -57,7 +66,10 @@ class SamplingRuleApplierTest { - +@@ -57,7 +68,10 @@ class SamplingRuleApplierTest { + private final SamplingRuleApplier applier = new 
SamplingRuleApplier( - CLIENT_ID, readSamplingRule("/sampling-rule-exactmatch.json"), Clock.getDefault()); @@ -1525,12 +1735,12 @@ index 920a5ffd..dcc7118a 100644 + readSamplingRule("/sampling-rule-exactmatch.json"), + TEST_SERVICE_NAME, + Clock.getDefault()); - + private final Resource resource = Resource.builder() -@@ -91,7 +103,8 @@ class SamplingRuleApplierTest { +@@ -91,7 +105,8 @@ class SamplingRuleApplierTest { .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); - + Date now = new Date(); - GetSamplingTargetsRequest.SamplingStatisticsDocument statistics = applier.snapshot(now); + GetSamplingTargetsRequest.SamplingStatisticsDocument statistics = @@ -1538,16 +1748,16 @@ index 920a5ffd..dcc7118a 100644 assertThat(statistics.getClientId()).isEqualTo(CLIENT_ID); assertThat(statistics.getRuleName()).isEqualTo("Test"); assertThat(statistics.getTimestamp()).isEqualTo(now); -@@ -100,7 +113,7 @@ class SamplingRuleApplierTest { +@@ -100,7 +115,7 @@ class SamplingRuleApplierTest { assertThat(statistics.getBorrowCount()).isEqualTo(0); - + // Reset - statistics = applier.snapshot(now); + statistics = applier.snapshot(now).getStatisticsDocument(); assertThat(statistics.getRequestCount()).isEqualTo(0); assertThat(statistics.getSampledCount()).isEqualTo(0); assertThat(statistics.getBorrowCount()).isEqualTo(0); -@@ -108,7 +121,7 @@ class SamplingRuleApplierTest { +@@ -108,7 +123,7 @@ class SamplingRuleApplierTest { doSample(applier); doSample(applier); now = new Date(); @@ -1556,8 +1766,8 @@ index 920a5ffd..dcc7118a 100644 assertThat(statistics.getClientId()).isEqualTo(CLIENT_ID); assertThat(statistics.getRuleName()).isEqualTo("Test"); assertThat(statistics.getTimestamp()).isEqualTo(now); -@@ -283,7 +296,10 @@ class SamplingRuleApplierTest { - +@@ -283,7 +298,10 @@ class SamplingRuleApplierTest { + private final SamplingRuleApplier applier = new SamplingRuleApplier( - CLIENT_ID, readSamplingRule("/sampling-rule-wildcards.json"), Clock.getDefault()); @@ 
-1565,12 +1775,12 @@ index 920a5ffd..dcc7118a 100644 + readSamplingRule("/sampling-rule-wildcards.json"), + TEST_SERVICE_NAME, + Clock.getDefault()); - + private final Resource resource = Resource.builder() -@@ -316,7 +332,8 @@ class SamplingRuleApplierTest { +@@ -316,7 +334,8 @@ class SamplingRuleApplierTest { assertThat(doSample(applier)).isEqualTo(SamplingResult.create(SamplingDecision.DROP)); - + Date now = new Date(); - GetSamplingTargetsRequest.SamplingStatisticsDocument statistics = applier.snapshot(now); + GetSamplingTargetsRequest.SamplingStatisticsDocument statistics = @@ -1578,16 +1788,16 @@ index 920a5ffd..dcc7118a 100644 assertThat(statistics.getClientId()).isEqualTo(CLIENT_ID); assertThat(statistics.getRuleName()).isEqualTo("Test"); assertThat(statistics.getTimestamp()).isEqualTo(now); -@@ -325,7 +342,7 @@ class SamplingRuleApplierTest { +@@ -325,7 +344,7 @@ class SamplingRuleApplierTest { assertThat(statistics.getBorrowCount()).isEqualTo(0); - + // Reset - statistics = applier.snapshot(now); + statistics = applier.snapshot(now).getStatisticsDocument(); assertThat(statistics.getRequestCount()).isEqualTo(0); assertThat(statistics.getSampledCount()).isEqualTo(0); assertThat(statistics.getBorrowCount()).isEqualTo(0); -@@ -333,7 +350,7 @@ class SamplingRuleApplierTest { +@@ -333,7 +352,7 @@ class SamplingRuleApplierTest { doSample(applier); doSample(applier); now = new Date(); @@ -1596,8 +1806,8 @@ index 920a5ffd..dcc7118a 100644 assertThat(statistics.getClientId()).isEqualTo(CLIENT_ID); assertThat(statistics.getRuleName()).isEqualTo("Test"); assertThat(statistics.getTimestamp()).isEqualTo(now); -@@ -626,7 +643,10 @@ class SamplingRuleApplierTest { - +@@ -626,7 +645,10 @@ class SamplingRuleApplierTest { + private final SamplingRuleApplier applier = new SamplingRuleApplier( - CLIENT_ID, readSamplingRule("/sampling-rule-awslambda.json"), Clock.getDefault()); @@ -1605,10 +1815,10 @@ index 920a5ffd..dcc7118a 100644 + 
readSamplingRule("/sampling-rule-awslambda.json"), + TEST_SERVICE_NAME, + Clock.getDefault()); - + private final Resource resource = Resource.builder() -@@ -677,7 +697,10 @@ class SamplingRuleApplierTest { +@@ -677,7 +699,10 @@ class SamplingRuleApplierTest { void borrowing() { SamplingRuleApplier applier = new SamplingRuleApplier( @@ -1617,12 +1827,12 @@ index 920a5ffd..dcc7118a 100644 + readSamplingRule("/sampling-rule-reservoir.json"), + TEST_SERVICE_NAME, + Clock.getDefault()); - + // Borrow assertThat(doSample(applier)) -@@ -688,7 +711,8 @@ class SamplingRuleApplierTest { +@@ -688,7 +713,8 @@ class SamplingRuleApplierTest { assertThat(doSample(applier)).isEqualTo(SamplingResult.create(SamplingDecision.DROP)); - + Date now = new Date(); - GetSamplingTargetsRequest.SamplingStatisticsDocument statistics = applier.snapshot(now); + GetSamplingTargetsRequest.SamplingStatisticsDocument statistics = @@ -1630,25 +1840,68 @@ index 920a5ffd..dcc7118a 100644 assertThat(statistics.getClientId()).isEqualTo(CLIENT_ID); assertThat(statistics.getRuleName()).isEqualTo("Test"); assertThat(statistics.getTimestamp()).isEqualTo(now); -@@ -697,7 +721,7 @@ class SamplingRuleApplierTest { +@@ -697,7 +723,7 @@ class SamplingRuleApplierTest { assertThat(statistics.getBorrowCount()).isEqualTo(1); - + // Reset - statistics = applier.snapshot(now); + statistics = applier.snapshot(now).getStatisticsDocument(); assertThat(statistics.getRequestCount()).isEqualTo(0); assertThat(statistics.getSampledCount()).isEqualTo(0); assertThat(statistics.getBorrowCount()).isEqualTo(0); -@@ -713,7 +737,7 @@ class SamplingRuleApplierTest { +@@ -713,7 +739,7 @@ class SamplingRuleApplierTest { }); - + now = new Date(); - statistics = applier.snapshot(now); + statistics = applier.snapshot(now).getStatisticsDocument(); assertThat(statistics.getClientId()).isEqualTo(CLIENT_ID); assertThat(statistics.getRuleName()).isEqualTo("Test"); assertThat(statistics.getTimestamp()).isEqualTo(now); -@@ -727,7 +751,7 @@ class 
SamplingRuleApplierTest { +@@ -722,12 +748,50 @@ class SamplingRuleApplierTest { + assertThat(statistics.getBorrowCount()).isEqualTo(1); + } + ++ @Test ++ void generateStatistics() { ++ SamplingRuleApplier applier = ++ new SamplingRuleApplier( ++ CLIENT_ID, ++ readSamplingRule("/sampling-rule-sample-all.json"), ++ TEST_SERVICE_NAME, ++ Clock.getDefault()); ++ ++ // Send a span for which the sampling decision hasn't been made yet ++ assertThat(doSample(applier)) ++ .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); ++ ++ // Send spans for which the sampling decision has already been made ++ // Send in different amounts to ensure statistics are generated for correct calls ++ assertThat(doSampleSpanWithValidContext(applier, /* isSampled= */ true)) ++ .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); ++ assertThat(doSampleSpanWithValidContext(applier, /* isSampled= */ true)) ++ .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); ++ assertThat(doSampleSpanWithValidContext(applier, /* isSampled= */ false)) ++ .isEqualTo(SamplingResult.create(SamplingDecision.DROP)); ++ assertThat(doSampleSpanWithValidContext(applier, /* isSampled= */ false)) ++ .isEqualTo(SamplingResult.create(SamplingDecision.DROP)); ++ assertThat(doSampleSpanWithValidContext(applier, /* isSampled= */ false)) ++ .isEqualTo(SamplingResult.create(SamplingDecision.DROP)); ++ ++ // Verify outgoing statistics ++ Date now = new Date(); ++ GetSamplingTargetsRequest.SamplingStatisticsDocument statistics = ++ applier.snapshot(now).getStatisticsDocument(); ++ assertThat(statistics.getClientId()).isEqualTo(CLIENT_ID); ++ assertThat(statistics.getRuleName()).isEqualTo("Test"); ++ assertThat(statistics.getTimestamp()).isEqualTo(now); ++ assertThat(statistics.getRequestCount()).isEqualTo(1); ++ assertThat(statistics.getSampledCount()).isEqualTo(1); ++ assertThat(statistics.getBorrowCount()).isEqualTo(0); ++ } ++ + @Test + void ruleWithTarget() { TestClock 
clock = TestClock.create(); SamplingRuleApplier applier = new SamplingRuleApplier( @@ -1657,8 +1910,8 @@ index 920a5ffd..dcc7118a 100644 // No target yet, borrows from reservoir every second. assertThat(doSample(applier)) .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); -@@ -746,8 +770,8 @@ class SamplingRuleApplierTest { - +@@ -746,8 +810,8 @@ class SamplingRuleApplierTest { + // Got a target! SamplingTargetDocument target = - SamplingTargetDocument.create(0.0, 5, 2, Date.from(now.plusSeconds(10)), "test"); @@ -1667,8 +1920,8 @@ index 920a5ffd..dcc7118a 100644 + applier = applier.withTarget(target, Date.from(now), clock.nanoTime()); // Statistics not expired yet assertThat(applier.snapshot(Date.from(now))).isNull(); - -@@ -786,7 +810,7 @@ class SamplingRuleApplierTest { + +@@ -786,7 +850,7 @@ class SamplingRuleApplierTest { TestClock clock = TestClock.create(); SamplingRuleApplier applier = new SamplingRuleApplier( @@ -1677,9 +1930,9 @@ index 920a5ffd..dcc7118a 100644 // No target yet, borrows from reservoir every second. assertThat(doSample(applier)) .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); -@@ -804,8 +828,8 @@ class SamplingRuleApplierTest { +@@ -804,8 +868,8 @@ class SamplingRuleApplierTest { assertThat(applier.snapshot(Date.from(now.plus(Duration.ofMinutes(30))))).isNotNull(); - + // Got a target! 
- SamplingTargetDocument target = SamplingTargetDocument.create(0.0, 5, null, null, "test"); - applier = applier.withTarget(target, Date.from(now)); @@ -1688,10 +1941,10 @@ index 920a5ffd..dcc7118a 100644 // No reservoir, always use fixed rate (drop) assertThat(doSample(applier)).isEqualTo(SamplingResult.create(SamplingDecision.DROP)); assertThat(doSample(applier)).isEqualTo(SamplingResult.create(SamplingDecision.DROP)); -@@ -815,12 +839,105 @@ class SamplingRuleApplierTest { +@@ -815,12 +879,105 @@ class SamplingRuleApplierTest { assertThat(applier.snapshot(Date.from(now))).isNotNull(); } - + + @Test + void ruleWithBoost() { + TestClock clock = TestClock.create(); @@ -1792,13 +2045,13 @@ index 920a5ffd..dcc7118a 100644 new SamplingRuleApplier( - CLIENT_ID, readSamplingRule("/sampling-rule-reservoir.json"), clock); + CLIENT_ID, readSamplingRule("/sampling-rule-reservoir.json"), TEST_SERVICE_NAME, clock); - + Instant now = Instant.ofEpochSecond(0, clock.now()); assertThat(applier.snapshot(Date.from(now))).isNotNull(); -@@ -839,6 +956,71 @@ class SamplingRuleApplierTest { +@@ -839,6 +996,71 @@ class SamplingRuleApplierTest { assertThat(doSample(applier)).isEqualTo(SamplingResult.create(SamplingDecision.DROP)); } - + + @Test + void hasBoostMethod() { + SamplingRuleApplier applierWithBoost = @@ -1867,21 +2120,50 @@ index 920a5ffd..dcc7118a 100644 private static SamplingResult doSample(SamplingRuleApplier applier) { return applier.shouldSample( Context.current(), +@@ -849,6 +1071,28 @@ class SamplingRuleApplierTest { + Collections.emptyList()); + } + ++ private static SamplingResult doSampleSpanWithValidContext( ++ SamplingRuleApplier applier, boolean isSampled) { ++ String traceId = TraceId.fromLongs(1, 2); ++ Context parentContext = ++ Context.root() ++ .with( ++ Span.wrap( ++ SpanContext.create( ++ traceId, ++ SpanId.fromLong(1L), ++ isSampled ? 
TraceFlags.getSampled() : TraceFlags.getDefault(), ++ TraceState.getDefault()))); ++ ++ return applier.shouldSample( ++ parentContext, ++ traceId, ++ SpanId.fromLong(2L), ++ SpanKind.CLIENT, ++ Attributes.empty(), ++ Collections.emptyList()); ++ } ++ + private static GetSamplingRulesResponse.SamplingRule readSamplingRule(String resourcePath) { + try { + return OBJECT_MAPPER.readValue( diff --git a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/XrayRulesSamplerTest.java b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/XrayRulesSamplerTest.java -index 1ca8df34..72ec524b 100644 +index 1ca8df34..14ebdbda 100644 --- a/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/XrayRulesSamplerTest.java +++ b/aws-xray/src/test/java/io/opentelemetry/contrib/awsxray/XrayRulesSamplerTest.java @@ -5,17 +5,28 @@ - + package io.opentelemetry.contrib.awsxray; - + +import static io.opentelemetry.semconv.HttpAttributes.HTTP_RESPONSE_STATUS_CODE; import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; - + import io.opentelemetry.api.common.AttributeKey; import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.SpanContext; @@ -1912,9 +2194,9 @@ index 1ca8df34..72ec524b 100644 import java.util.stream.Collectors; import java.util.stream.Stream; import org.junit.jupiter.api.Test; - + class XrayRulesSamplerTest { - + + private static final AttributeKey URL_PATH = AttributeKey.stringKey("url.path"); + private static final AttributeKey HTTP_METHOD = AttributeKey.stringKey("http.method"); + @@ -1958,24 +2240,26 @@ index 1ca8df34..72ec524b 100644 - 1); + 1, + null); - + TestClock clock = TestClock.create(); XrayRulesSampler sampler = -@@ -103,22 +124,58 @@ class XrayRulesSamplerTest { +@@ -103,22 +124,72 @@ class XrayRulesSamplerTest { Resource.getDefault(), 
clock, Sampler.alwaysOn(), - Arrays.asList(rule1, rule4, rule3, rule2)); + Arrays.asList(rule1, rule4, rule3, rule2), + null); - + assertThat(doSample(sampler, "cat-service")) - .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); + .usingRecursiveComparison() + .isEqualTo( + AwsSamplingResult.create( + SamplingDecision.RECORD_AND_SAMPLE, -+ Attributes.empty(), ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "cat-rule") ++ .build(), + XrayRulesSampler.hashRuleName("cat-rule"))); assertThat(doSample(sampler, "cat-service")) - .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); @@ -1983,7 +2267,9 @@ index 1ca8df34..72ec524b 100644 + .isEqualTo( + AwsSamplingResult.create( + SamplingDecision.RECORD_AND_SAMPLE, -+ Attributes.empty(), ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "cat-rule") ++ .build(), + XrayRulesSampler.hashRuleName("cat-rule"))); assertThat(doSample(sampler, "dog-service")) - .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); @@ -1991,7 +2277,9 @@ index 1ca8df34..72ec524b 100644 + .isEqualTo( + AwsSamplingResult.create( + SamplingDecision.RECORD_AND_SAMPLE, -+ Attributes.empty(), ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "dog-rule") ++ .build(), + XrayRulesSampler.hashRuleName("dog-rule"))); assertThat(doSample(sampler, "dog-service")) - .isEqualTo(SamplingResult.create(SamplingDecision.DROP)); @@ -1999,7 +2287,9 @@ index 1ca8df34..72ec524b 100644 + .isEqualTo( + AwsSamplingResult.create( + SamplingDecision.DROP, -+ Attributes.empty(), ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "dog-rule") ++ .build(), + XrayRulesSampler.hashRuleName("dog-rule"))); assertThat(doSample(sampler, "bat-service")) - .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); @@ -2007,7 +2297,9 @@ index 1ca8df34..72ec524b 100644 + .isEqualTo( + AwsSamplingResult.create( + 
SamplingDecision.RECORD_AND_SAMPLE, -+ Attributes.empty(), ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "bat-rule") ++ .build(), + XrayRulesSampler.hashRuleName("bat-rule"))); assertThat(doSample(sampler, "bat-service")) - .isEqualTo(SamplingResult.create(SamplingDecision.RECORD_AND_SAMPLE)); @@ -2015,7 +2307,9 @@ index 1ca8df34..72ec524b 100644 + .isEqualTo( + AwsSamplingResult.create( + SamplingDecision.RECORD_AND_SAMPLE, -+ Attributes.empty(), ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "bat-rule") ++ .build(), + XrayRulesSampler.hashRuleName("bat-rule"))); assertThat(doSample(sampler, "unknown")) - .isEqualTo(SamplingResult.create(SamplingDecision.DROP)); @@ -2023,25 +2317,27 @@ index 1ca8df34..72ec524b 100644 + .isEqualTo( + AwsSamplingResult.create( + SamplingDecision.DROP, -+ Attributes.empty(), ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "default-rule") ++ .build(), + XrayRulesSampler.hashRuleName("default-rule"))); - + Instant now = Instant.ofEpochSecond(0, clock.now()); assertThat(sampler.snapshot(Date.from(now))).hasSize(4); -@@ -128,10 +185,10 @@ class XrayRulesSamplerTest { +@@ -128,10 +199,10 @@ class XrayRulesSamplerTest { assertThat(sampler.snapshot(Date.from(now))).hasSize(4); - + SamplingTargetDocument catTarget = - SamplingTargetDocument.create(0.0, 10, null, null, "cat-rule"); + SamplingTargetDocument.create(0.0, 10, null, null, null, "cat-rule"); - + SamplingTargetDocument batTarget = - SamplingTargetDocument.create(0.0, 5, null, null, "bat-rule"); + SamplingTargetDocument.create(0.0, 5, null, null, null, "bat-rule"); - + clock.advance(Duration.ofSeconds(10)); now = Instant.ofEpochSecond(0, clock.now()); -@@ -145,16 +202,41 @@ class XrayRulesSamplerTest { +@@ -145,16 +216,51 @@ class XrayRulesSamplerTest { .collect(Collectors.toSet()), Date.from(now)); assertThat(doSample(sampler, "dog-service")) @@ -2050,7 +2346,9 @@ index 1ca8df34..72ec524b 100644 + 
.isEqualTo( + AwsSamplingResult.create( + SamplingDecision.RECORD_AND_SAMPLE, -+ Attributes.empty(), ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "dog-rule") ++ .build(), + XrayRulesSampler.hashRuleName("dog-rule"))); assertThat(doSample(sampler, "dog-service")) - .isEqualTo(SamplingResult.create(SamplingDecision.DROP)); @@ -2058,7 +2356,9 @@ index 1ca8df34..72ec524b 100644 + .isEqualTo( + AwsSamplingResult.create( + SamplingDecision.DROP, -+ Attributes.empty(), ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "dog-rule") ++ .build(), + XrayRulesSampler.hashRuleName("dog-rule"))); assertThat(doSample(sampler, "unknown")) - .isEqualTo(SamplingResult.create(SamplingDecision.DROP)); @@ -2066,7 +2366,9 @@ index 1ca8df34..72ec524b 100644 + .isEqualTo( + AwsSamplingResult.create( + SamplingDecision.DROP, -+ Attributes.empty(), ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "default-rule") ++ .build(), + XrayRulesSampler.hashRuleName("default-rule"))); // Targets overridden to always drop. 
assertThat(doSample(sampler, "cat-service")) @@ -2075,7 +2377,9 @@ index 1ca8df34..72ec524b 100644 + .isEqualTo( + AwsSamplingResult.create( + SamplingDecision.DROP, -+ Attributes.empty(), ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "cat-rule") ++ .build(), + XrayRulesSampler.hashRuleName("cat-rule"))); assertThat(doSample(sampler, "bat-service")) - .isEqualTo(SamplingResult.create(SamplingDecision.DROP)); @@ -2083,15 +2387,17 @@ index 1ca8df34..72ec524b 100644 + .isEqualTo( + AwsSamplingResult.create( + SamplingDecision.DROP, -+ Attributes.empty(), ++ Attributes.builder() ++ .put(XrayRulesSampler.AWS_XRAY_SAMPLING_RULE, "bat-rule") ++ .build(), + XrayRulesSampler.hashRuleName("bat-rule"))); - + // Minimum is batTarget, 5s from now assertThat(sampler.nextTargetFetchTimeNanos()) -@@ -169,6 +251,867 @@ class XrayRulesSamplerTest { +@@ -169,6 +275,891 @@ class XrayRulesSamplerTest { assertThat(sampler.snapshot(Date.from(now))).hasSize(4); } - + + @Test + void updateTargetsWithLocalAdaptiveSamplingConfig() { + SamplingRule rule1 = @@ -2430,6 +2736,10 @@ index 1ca8df34..72ec524b 100644 + .thenReturn( + SpanContext.create( + "TRACE_ID", "SPAN_ID", TraceFlags.getDefault(), TraceState.getDefault())); ++ when(readableSpanMock.getParentSpanContext()) ++ .thenReturn( ++ SpanContext.create( ++ "TRACE_ID", "SPAN_ID", TraceFlags.getDefault(), TraceState.getDefault())); + SpanData spanDataMock = mock(SpanData.class); + Attributes attributesMock = mock(Attributes.class); + when(spanDataMock.getAttributes()).thenReturn(attributesMock); @@ -2574,6 +2884,10 @@ index 1ca8df34..72ec524b 100644 + .thenReturn( + SpanContext.create( + "TRACE_ID", "SPAN_ID", TraceFlags.getDefault(), TraceState.getDefault())); ++ when(readableSpanMock.getParentSpanContext()) ++ .thenReturn( ++ SpanContext.create( ++ "TRACE_ID", "SPAN_ID", TraceFlags.getDefault(), TraceState.getDefault())); + when(readableSpanMock.getAttribute(any())).thenReturn("test-operation"); + 
when(readableSpanMock.getLatencyNanos()).thenReturn(1L); + @@ -2642,6 +2956,10 @@ index 1ca8df34..72ec524b 100644 + .thenReturn( + SpanContext.create( + "TRACE_ID", "SPAN_ID", TraceFlags.getDefault(), TraceState.getDefault())); ++ when(readableSpanMock.getParentSpanContext()) ++ .thenReturn( ++ SpanContext.create( ++ "TRACE_ID", "SPAN_ID", TraceFlags.getDefault(), TraceState.getDefault())); + when(readableSpanMock.getAttribute(any())).thenReturn("test-operation"); + when(readableSpanMock.getLatencyNanos()).thenReturn(300_000_000L); // 300 ms + @@ -2711,6 +3029,10 @@ index 1ca8df34..72ec524b 100644 + .thenReturn( + SpanContext.create( + "TRACE_ID", "SPAN_ID", TraceFlags.getDefault(), TraceState.getDefault())); ++ when(readableSpanMock.getParentSpanContext()) ++ .thenReturn( ++ SpanContext.create( ++ "TRACE_ID", "SPAN_ID", TraceFlags.getDefault(), TraceState.getDefault())); + when(readableSpanMock.getAttribute(any())).thenReturn("test-operation"); + when(readableSpanMock.getLatencyNanos()).thenReturn(300_000_000L); // 300 ms + @@ -2796,6 +3118,10 @@ index 1ca8df34..72ec524b 100644 + .thenReturn( + SpanContext.create( + "TRACE_ID", "SPAN_ID", TraceFlags.getDefault(), TraceState.getDefault())); ++ when(readableSpanMock.getParentSpanContext()) ++ .thenReturn( ++ SpanContext.create( ++ "TRACE_ID", "SPAN_ID", TraceFlags.getDefault(), TraceState.getDefault())); + + SpanData spanDataMock = mock(SpanData.class); + Attributes attributesMock = mock(Attributes.class); @@ -2879,6 +3205,10 @@ index 1ca8df34..72ec524b 100644 + .thenReturn( + SpanContext.create( + "TRACE_ID", "SPAN_ID", TraceFlags.getDefault(), TraceState.getDefault())); ++ when(readableSpanMock.getParentSpanContext()) ++ .thenReturn( ++ SpanContext.create( ++ "TRACE_ID", "SPAN_ID", TraceFlags.getDefault(), TraceState.getDefault())); + when(readableSpanMock.getLatencyNanos()).thenReturn(1L); + + SpanData spanDataMock = mock(SpanData.class); @@ -2968,7 +3298,7 @@ index 283e3b3c..cf0cb072 100644 + .build()), + 
Collections.emptyList()); GetSamplingTargetsResponse response = client.getSamplingTargets(samplingTargetsRequest); - + AggregatedHttpRequest request = server.takeRequest().request(); @@ -174,7 +175,8 @@ class XraySamplerClientTest { assertThatThrownBy( @@ -3008,6 +3338,27 @@ index 00000000..32752d5e + "speed": "0" + } +} +diff --git a/aws-xray/src/test/resources/sampling-rule-sample-all.json b/aws-xray/src/test/resources/sampling-rule-sample-all.json +new file mode 100644 +index 00000000..4ba3013a +--- /dev/null ++++ b/aws-xray/src/test/resources/sampling-rule-sample-all.json +@@ -0,0 +1,15 @@ ++{ ++ "RuleName": "Test", ++ "RuleARN": "arn:aws:xray:us-east-1:595986152929:sampling-rule/Test", ++ "ResourceARN": "arn:aws:xray:us-east-1:595986152929:my-service", ++ "Priority": 1, ++ "FixedRate": 1.0, ++ "ReservoirSize": 0, ++ "ServiceName": "*", ++ "ServiceType": "*", ++ "Host": "*", ++ "HTTPMethod": "*", ++ "URLPath": "*", ++ "Version": 1, ++ "Attributes": {} ++} diff --git a/disk-buffering/build.gradle.kts b/disk-buffering/build.gradle.kts index 8250c1bd..74a1a24c 100644 --- a/disk-buffering/build.gradle.kts diff --git a/.github/patches/opentelemetry-java-instrumentation.patch b/.github/patches/opentelemetry-java-instrumentation.patch new file mode 100644 index 0000000000..988a048b1d --- /dev/null +++ b/.github/patches/opentelemetry-java-instrumentation.patch @@ -0,0 +1,28 @@ +diff --git a/dependencyManagement/build.gradle.kts b/dependencyManagement/build.gradle.kts +index 98def282f8..65fd6a8a13 100644 +--- a/dependencyManagement/build.gradle.kts ++++ b/dependencyManagement/build.gradle.kts +@@ -104,7 +104,7 @@ val DEPENDENCIES = listOf( + "io.netty:netty:3.10.6.Final", + "io.opentelemetry.contrib:opentelemetry-azure-resources:${otelContribVersion}", + "io.opentelemetry.contrib:opentelemetry-aws-resources:${otelContribVersion}", +- "io.opentelemetry.contrib:opentelemetry-aws-xray-propagator:${otelContribVersion}", ++ 
"io.opentelemetry.contrib:opentelemetry-aws-xray-propagator:1.48.0-alpha-adot1", + "io.opentelemetry.contrib:opentelemetry-gcp-resources:${otelContribVersion}", + "io.opentelemetry.contrib:opentelemetry-cloudfoundry-resources:${otelContribVersion}", + "io.opentelemetry.contrib:opentelemetry-baggage-processor:${otelContribVersion}", +diff --git a/version.gradle.kts b/version.gradle.kts +index 023d04703c..ec9690086c 100644 +--- a/version.gradle.kts ++++ b/version.gradle.kts +@@ -1,5 +1,5 @@ +-val stableVersion = "2.18.1" +-val alphaVersion = "2.18.1-alpha" ++val stableVersion = "2.18.1-adot1" ++val alphaVersion = "2.18.1-adot1-alpha" + + allprojects { + if (findProperty("otel.stable") != "true") { +-- +2.45.1 + diff --git a/.github/scripts/patch.sh b/.github/scripts/patch.sh index b6a6bba94e..9d2c902a61 100755 --- a/.github/scripts/patch.sh +++ b/.github/scripts/patch.sh @@ -44,3 +44,16 @@ if [[ -f "$OTEL_JAVA_CONTRIB_PATCH" ]]; then else echo "Skipping patching opentelemetry-java-contrib" fi + + +OTEL_JAVA_INSTRUMENTATION_PATCH=".github/patches/opentelemetry-java-instrumentation.patch" +if [[ -f "$OTEL_JAVA_INSTRUMENTATION_PATCH" ]]; then + git clone https://github.com/open-telemetry/opentelemetry-java-instrumentation.git + cd opentelemetry-java-instrumentation + git checkout ${OTEL_JAVA_INSTRUMENTATION_VERSION} -b tag-${OTEL_JAVA_INSTRUMENTATION_VERSION} + patch -p1 < "../${OTEL_JAVA_INSTRUMENTATION_PATCH}" + git commit -a -m "ADOT Patch release" + cd - +else + echo "Skipping patching opentelemetry-java-instrumentation" +fi diff --git a/CHANGELOG.md b/CHANGELOG.md index dcfaa0543d..6750e1dc6d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,4 +16,6 @@ If your change does not need a CHANGELOG entry, add the "skip changelog" label t ### Enhancements - Support X-Ray Trace Id extraction from Lambda Context object, and respect user-configured OTEL_PROPAGATORS in AWS Lamdba instrumentation - 
([#1191](https://github.com/aws-observability/aws-otel-java-instrumentation/pull/1191)) \ No newline at end of file + ([#1191](https://github.com/aws-observability/aws-otel-java-instrumentation/pull/1191)) +- Adaptive Sampling improvements: Ensure propagation of sampling rule across services and AWS accounts. Remove unnecessary B3 propagator. + ([#1201](https://github.com/aws-observability/aws-otel-java-instrumentation/pull/1201)) \ No newline at end of file diff --git a/dependencyManagement/build.gradle.kts b/dependencyManagement/build.gradle.kts index d6218a08b2..cf5f3c2f26 100644 --- a/dependencyManagement/build.gradle.kts +++ b/dependencyManagement/build.gradle.kts @@ -27,7 +27,7 @@ data class DependencySet(val group: String, val version: String, val modules: Li val testSnapshots = rootProject.findProperty("testUpstreamSnapshots") == "true" // This is the version of the upstream instrumentation BOM -val otelVersion = "2.18.1" +val otelVersion = "2.18.1-adot1" val otelSnapshotVersion = "2.19.0" val otelAlphaVersion = if (!testSnapshots) "$otelVersion-alpha" else "$otelSnapshotVersion-alpha-SNAPSHOT" val otelJavaAgentVersion = if (!testSnapshots) otelVersion else "$otelSnapshotVersion-SNAPSHOT" diff --git a/lambda-layer/build-layer.sh b/lambda-layer/build-layer.sh index 791ad59152..473d83317d 100755 --- a/lambda-layer/build-layer.sh +++ b/lambda-layer/build-layer.sh @@ -2,22 +2,31 @@ set -e SOURCEDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )" +file="$SOURCEDIR/../.github/patches/versions" +contrib_version=$(awk -F'=v' '/OTEL_JAVA_CONTRIB_VERSION/ {print $2}' "$file") +if [[ -n "$contrib_version" ]]; then + echo "Found OTEL Contrib Version: ${contrib_version}" + ## Clone and Patch the OpenTelemetry Java contrib Repository + echo "Info: Cloning and Patching OpenTelemetry Java contrib Repository" + git clone https://github.com/open-telemetry/opentelemetry-java-contrib.git + pushd opentelemetry-java-contrib + git checkout v${contrib_version} -b 
tag-v${contrib_version} -## Get ADOT version -echo "Info: Getting ADOT Version" -pushd "$SOURCEDIR"/.. -version=$(./gradlew -q printVersion) -echo "Found ADOT Version: ${version}" -popd + # There is another patch in the .github/patches directory for other changes. We should apply them too for consistency. + patch -p1 < "$SOURCEDIR"/../.github/patches/opentelemetry-java-contrib.patch + + ./gradlew publishToMavenLocal + popd + rm -rf opentelemetry-java-contrib +fi ## Get OTel version echo "Info: Getting OTEL Version" -file="$SOURCEDIR/../.github/patches/versions" -otel_instrumentation_version=$(awk -F'=v' '/OTEL_JAVA_INSTRUMENTATION_VERSION/ {print $2}' "$file") -echo "Found OTEL Version: ${otel_instrumentation_version}" +version=$(awk -F'=v' '/OTEL_JAVA_INSTRUMENTATION_VERSION/ {print $2}' "$file") +echo "Found OTEL Version: ${version}" # Exit if the version is empty or null -if [[ -z "$otel_instrumentation_version" ]]; then +if [[ -z "$version" ]]; then echo "Error: Version could not be found in ${file}." exit 1 fi @@ -27,7 +36,10 @@ fi echo "Info: Cloning and Patching OpenTelemetry Java Instrumentation Repository" git clone https://github.com/open-telemetry/opentelemetry-java-instrumentation.git pushd opentelemetry-java-instrumentation -git checkout v${otel_instrumentation_version} -b tag-v${otel_instrumentation_version} +git checkout v${version} -b tag-v${version} + +# There is another patch in the .github/patches directory for other changes. We should apply them too for consistency. 
+patch -p1 < "$SOURCEDIR"/../.github/patches/opentelemetry-java-instrumentation.patch # This patch is for Lambda related context propagation patch -p1 < "$SOURCEDIR"/patches/opentelemetry-java-instrumentation.patch @@ -36,23 +48,6 @@ patch -p1 < "$SOURCEDIR"/patches/opentelemetry-java-instrumentation.patch popd rm -rf opentelemetry-java-instrumentation -contrib_version=$(awk -F'=v' '/OTEL_JAVA_CONTRIB_VERSION/ {print $2}' "$file") -if [[ -n "$contrib_version" ]]; then - echo "Found OTEL Contrib Version: ${contrib_version}" - ## Clone and Patch the OpenTelemetry Java contrib Repository - echo "Info: Cloning and Patching OpenTelemetry Java contrib Repository" - git clone https://github.com/open-telemetry/opentelemetry-java-contrib.git - pushd opentelemetry-java-contrib - git checkout v${contrib_version} -b tag-v${contrib_version} - - # There is another patch in the .github/patches directory for other changes. We should apply them too for consistency. - patch -p1 < "$SOURCEDIR"/../.github/patches/opentelemetry-java-contrib.patch - - ./gradlew publishToMavenLocal - popd - rm -rf opentelemetry-java-contrib -fi - ## Build the ADOT Java from current source echo "Info: Building ADOT Java from current source" pushd "$SOURCEDIR"/.. 
diff --git a/lambda-layer/patches/aws-otel-java-instrumentation.patch b/lambda-layer/patches/aws-otel-java-instrumentation.patch index bbd66b64c1..f95c364151 100644 --- a/lambda-layer/patches/aws-otel-java-instrumentation.patch +++ b/lambda-layer/patches/aws-otel-java-instrumentation.patch @@ -6,7 +6,7 @@ index d186406..91b9386 100644 val testSnapshots = rootProject.findProperty("testUpstreamSnapshots") == "true" // This is the version of the upstream instrumentation BOM --val otelVersion = "2.18.1" +-val otelVersion = "2.18.1-adot1" +val otelVersion = "2.18.1-adot-lambda1" val otelSnapshotVersion = "2.19.0" val otelAlphaVersion = if (!testSnapshots) "$otelVersion-alpha" else "$otelSnapshotVersion-alpha-SNAPSHOT" diff --git a/lambda-layer/patches/opentelemetry-java-instrumentation.patch b/lambda-layer/patches/opentelemetry-java-instrumentation.patch index c90c3bb0fa..f82cfd273f 100644 --- a/lambda-layer/patches/opentelemetry-java-instrumentation.patch +++ b/lambda-layer/patches/opentelemetry-java-instrumentation.patch @@ -618,8 +618,8 @@ index 023d04703c..b267166804 100644 --- a/version.gradle.kts +++ b/version.gradle.kts @@ -1,5 +1,5 @@ --val stableVersion = "2.18.1" --val alphaVersion = "2.18.1-alpha" +-val stableVersion = "2.18.1-adot1" +-val alphaVersion = "2.18.1-adot1-alpha" +val stableVersion = "2.18.1-adot-lambda1" +val alphaVersion = "2.18.1-adot-lambda1-alpha" diff --git a/scripts/local_patch.sh b/scripts/local_patch.sh index 079d4516b9..d1c01c5d8b 100755 --- a/scripts/local_patch.sh +++ b/scripts/local_patch.sh @@ -56,4 +56,28 @@ if [[ -f "$OTEL_JAVA_CONTRIB_PATCH" ]]; then rm -rf opentelemetry-java-contrib else echo "Skipping patching opentelemetry-java-contrib" +fi + + +# Patching opentelemetry-java-instrumentation +OTEL_JAVA_INSTRUMENTATION_PATCH=".github/patches/opentelemetry-java-instrumentation.patch" +if [[ -f "$OTEL_JAVA_INSTRUMENTATION_PATCH" ]]; then + echo "Patching opentelemetry-java-instrumentation" + git clone 
https://github.com/open-telemetry/opentelemetry-java-instrumentation.git + cd opentelemetry-java-instrumentation + + echo "Checking out tag ${OTEL_JAVA_INSTRUMENTATION_VERSION}" + git checkout ${OTEL_JAVA_INSTRUMENTATION_VERSION} -b tag-${OTEL_JAVA_INSTRUMENTATION_VERSION} + patch -p1 < "../${OTEL_JAVA_INSTRUMENTATION_PATCH}" + git commit -a -m "ADOT Patch release" + + echo "Building patched opentelemetry-java-instrumentation" + ./gradlew clean assemble + ./gradlew publishToMavenLocal + cd - + + echo "Cleaning up opentelemetry-java-instrumentation" + rm -rf opentelemetry-java-instrumentation +else + echo "Skipping patching opentelemetry-java-instrumentation" fi \ No newline at end of file From 8ad0b24159382bc0a924665cb5ab0c449def0639 Mon Sep 17 00:00:00 2001 From: Thomas Pierce Date: Mon, 22 Sep 2025 19:02:44 -0700 Subject: [PATCH 66/83] feat: add self-validating workflow gate jobs (#1213) Add gate jobs that fail if any workflow job fails OR if any job is missing from the gate's needs array. Prevents both job failures and configuration drift when adding new workflow jobs. Callout: I don't think it's possible to have one gate for both workflows, but it should not be the case that we add more over time. ### Testing: See: https://github.com/aws-observability/aws-otel-python-instrumentation/pull/477 By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .github/workflows/codeql-analysis.yml | 35 +++++++++++++++++++++++++++ .github/workflows/pr-build.yml | 34 ++++++++++++++++++++++++++ 2 files changed, 69 insertions(+) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index add4ca8507..a88aa6baeb 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -60,3 +60,38 @@ jobs: - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@16df4fbc19aea13d921737861d6c622bf3cefe23 #v3.30.3 + + all-codeql-checks-pass: + runs-on: ubuntu-latest + needs: [analyze] + if: always() + steps: + - name: Checkout to get workflow file + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0 + + - name: Check all jobs succeeded and none missing + run: | + # Check if all needed jobs succeeded + results='${{ toJSON(needs) }}' + if echo "$results" | jq -r '.[] | .result' | grep -v success; then + echo "Some jobs failed" + exit 1 + fi + + # Extract all job names from workflow (excluding this gate job) + all_jobs=$(yq eval '.jobs | keys | .[]' .github/workflows/codeql.yml | grep -v "all-codeql-checks-pass" | sort) + + # Extract job names from needs array + needed_jobs='${{ toJSON(needs) }}' + needs_list=$(echo "$needed_jobs" | jq -r 'keys[]' | sort) + + # Check if any jobs are missing from needs + missing_jobs=$(comm -23 <(echo "$all_jobs") <(echo "$needs_list")) + if [ -n "$missing_jobs" ]; then + echo "ERROR: Jobs missing from needs array in all-codeql-checks-pass:" + echo "$missing_jobs" + echo "Please add these jobs to the needs array of all-codeql-checks-pass" + exit 1 + fi + + echo "All CodeQL checks passed and no jobs missing from gate!" 
diff --git a/.github/workflows/pr-build.yml b/.github/workflows/pr-build.yml index 5b343c82be..822c20c710 100644 --- a/.github/workflows/pr-build.yml +++ b/.github/workflows/pr-build.yml @@ -219,3 +219,37 @@ jobs: working-directory: lambda-layer run: ./build-layer.sh + all-pr-checks-pass: + runs-on: ubuntu-latest + needs: [changelog-check, testpatch, build, build-lambda] + if: always() + steps: + - name: Checkout to get workflow file + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0 + + - name: Check all jobs succeeded and none missing + run: | + # Check if all needed jobs succeeded + results='${{ toJSON(needs) }}' + if echo "$results" | jq -r '.[] | .result' | grep -v success; then + echo "Some jobs failed" + exit 1 + fi + + # Extract all job names from workflow (excluding this gate job) + all_jobs=$(yq eval '.jobs | keys | .[]' .github/workflows/pr-build.yml | grep -v "all-pr-checks-pass" | sort) + + # Extract job names from needs array + needed_jobs='${{ toJSON(needs) }}' + needs_list=$(echo "$needed_jobs" | jq -r 'keys[]' | sort) + + # Check if any jobs are missing from needs + missing_jobs=$(comm -23 <(echo "$all_jobs") <(echo "$needs_list")) + if [ -n "$missing_jobs" ]; then + echo "ERROR: Jobs missing from needs array in all-pr-checks-pass:" + echo "$missing_jobs" + echo "Please add these jobs to the needs array of all-pr-checks-pass" + exit 1 + fi + + echo "All checks passed and no jobs missing from gate!" From fbeaff823c2df184c0ec55a36c9cbc7cbff87841 Mon Sep 17 00:00:00 2001 From: Miqueas Herrera Date: Tue, 23 Sep 2025 11:02:54 -0700 Subject: [PATCH 67/83] Update main-build.yml (#1217) Adding workflow_dispatch for manual run option. By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .github/workflows/main-build.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/main-build.yml b/.github/workflows/main-build.yml index 113b3b7d4e..66e977d5b4 100644 --- a/.github/workflows/main-build.yml +++ b/.github/workflows/main-build.yml @@ -4,6 +4,7 @@ on: branches: - main - "release/v*" + workflow_dispatch: # be able to run the workflow on demand env: AWS_DEFAULT_REGION: us-east-1 STAGING_ECR_REGISTRY: 611364707713.dkr.ecr.us-west-2.amazonaws.com From d9ed463849f21afaa270f213d26afa8103739908 Mon Sep 17 00:00:00 2001 From: Miqueas Herrera Date: Tue, 23 Sep 2025 14:41:08 -0700 Subject: [PATCH 68/83] Update 3p actions from VID to CSHA (#1205) This pr updates 3p actions from VID to CSHA for remaining files. References: https://github.com/actions/checkout https://github.com/actions/setup-python https://github.com/actions/setup-java https://github.com/actions/setup-node https://github.com/actions/cache https://github.com/actions/upload-artifact https://github.com/actions/setup-node https://github.com/aws-actions/configure-aws-credentials https://github.com/actions/download-artifact https://github.com/aws-actions/aws-secretsmanager-get-secrets https://github.com/docker/login-action https://github.com/docker/setup-buildx-action https://github.com/docker/build-push-action https://github.com/docker/setup-qemu-action https://github.com/gradle/actions/blob/f8140229023a7015c7ce4df6f7c390a3cace8f83/docs/deprecation-upgrade-guide.md#using-the-action-to-execute-gradle-via-the-arguments-parameter-is-deprecated https://github.com/github/codeql-action https://github.com/hashicorp/setup-terraform https://github.com/codecov/codecov-action https://github.com/burrunan/gradle-cache-action https://github.com/JasonEtco/create-an-issue https://github.com/benchmark-action/github-action-benchmark By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--------- Co-authored-by: Thomas Pierce --- .github/actions/cpUtility-testing/action.yml | 12 ++++---- .github/actions/image_scan/action.yml | 2 +- .github/actions/patch-dependencies/action.yml | 8 +++--- .github/workflows/codeql-analysis.yml | 7 ++--- .github/workflows/daily-scan.yml | 7 ++--- .../docker-build-smoke-tests-fake-backend.yml | 7 ++--- .../e2e-tests-app-with-java-agent.yml | 7 ++--- .github/workflows/e2e-tests-with-operator.yml | 2 +- .github/workflows/main-build.yml | 14 ++++------ .../nightly-upstream-snapshot-build.yml | 20 ++++++------- .github/workflows/patch-release-build.yml | 27 +++++++++--------- .../workflows/post-release-version-bump.yml | 8 +++--- .github/workflows/pr-build.yml | 28 +++++++++---------- .github/workflows/pre-release-prepare.yml | 6 ++-- .github/workflows/publish-status.yml | 2 +- .github/workflows/release-build.yml | 26 ++++++++--------- .github/workflows/release-lambda.yml | 18 ++++++------ .github/workflows/release-udp-exporter.yml | 2 +- .github/workflows/soak-testing.yml | 12 ++++---- .github/workflows/stale-bot.yml | 2 +- .github/workflows/udp-exporter-e2e-test.yml | 4 +-- 21 files changed, 107 insertions(+), 114 deletions(-) diff --git a/.github/actions/cpUtility-testing/action.yml b/.github/actions/cpUtility-testing/action.yml index 883763ccdc..a59ad5ac05 100644 --- a/.github/actions/cpUtility-testing/action.yml +++ b/.github/actions/cpUtility-testing/action.yml @@ -25,28 +25,28 @@ runs: using: "composite" steps: - name: Set up QEMU - uses: docker/setup-qemu-action@v3 + uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 #3.6.0 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 #v3.11.1 with: driver-opts: image=moby/buildkit:v0.15.1 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 
with: role-to-assume: ${{ inputs.snapshot-ecr-role }} aws-region: ${{ inputs.aws-region }} - name: Login to private staging ecr - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: ${{ inputs.image_registry }} env: AWS_REGION: ${{ inputs.aws-region }} - name: Build image for testing - uses: docker/build-push-action@v5 + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 #v6.18.0 with: push: false build-args: "ADOT_JAVA_VERSION=${{ inputs.adot-java-version }}" @@ -60,7 +60,7 @@ runs: run: .github/scripts/test-adot-javaagent-image.sh "${{ inputs.image_uri_with_tag }}" "${{ inputs.adot-java-version }}" - name: Build and push image - uses: docker/build-push-action@v5 + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 #v6.18.0 with: push: true build-args: "ADOT_JAVA_VERSION=${{ inputs.adot-java-version }}" diff --git a/.github/actions/image_scan/action.yml b/.github/actions/image_scan/action.yml index 7a98a5906a..57a30dae58 100644 --- a/.github/actions/image_scan/action.yml +++ b/.github/actions/image_scan/action.yml @@ -32,7 +32,7 @@ runs: run: docker logout public.ecr.aws - name: Run Trivy vulnerability scanner on image - uses: aquasecurity/trivy-action@master + uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 #v0.33.1 with: image-ref: ${{ inputs.image-ref }} severity: ${{ inputs.severity }} diff --git a/.github/actions/patch-dependencies/action.yml b/.github/actions/patch-dependencies/action.yml index 529d956221..106d9404e4 100644 --- a/.github/actions/patch-dependencies/action.yml +++ b/.github/actions/patch-dependencies/action.yml @@ -65,14 +65,14 @@ runs: shell: bash - name: Build opentelemetry-java with tests - uses: gradle/gradle-build-action@v2 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa #v2 if: ${{ env.patch_otel_java == 'true' && inputs.run_tests != 'false' }} with: arguments: build 
publishToMavenLocal --scan --no-daemon build-root-directory: opentelemetry-java - name: Build opentelemetry-java - uses: gradle/gradle-build-action@v2 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa #v2 if: ${{ env.patch_otel_java == 'true' && inputs.run_tests == 'false' }} with: arguments: publishToMavenLocal --scan --no-daemon @@ -84,14 +84,14 @@ runs: shell: bash - name: Build opentelemetry-java-contrib with tests - uses: gradle/gradle-build-action@v2 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa #v2 if: ${{ env.patch_otel_java_contrib == 'true' && inputs.run_tests != 'false' }} with: arguments: build publishToMavenLocal --scan --no-daemon build-root-directory: opentelemetry-java-contrib - name: Build opentelemetry-java-contrib - uses: gradle/gradle-build-action@v2 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa #v2 if: ${{ env.patch_otel_java_contrib == 'true' && inputs.run_tests == 'false' }} with: arguments: publishToMavenLocal --scan --no-daemon diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index a88aa6baeb..1ff9f43d9a 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -52,11 +52,10 @@ jobs: - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - - name: Setup Gradle - uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #4.4.3 - - name: Manually build to avoid autobuild failures - run: ./gradlew build + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 + with: + arguments: build - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@16df4fbc19aea13d921737861d6c622bf3cefe23 #v3.30.3 diff --git a/.github/workflows/daily-scan.yml b/.github/workflows/daily-scan.yml index e1d175629b..f1c379a848 100644 --- a/.github/workflows/daily-scan.yml +++ b/.github/workflows/daily-scan.yml @@ 
-52,11 +52,10 @@ jobs: - name: Publish patched dependencies to maven local uses: ./.github/actions/patch-dependencies - - name: Setup Gradle - uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #4.4.3 - - name: Build JAR - run: ./gradlew assemble -PlocalDocker=true + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 + with: + arguments: assemble -PlocalDocker=true # See http://jeremylong.github.io/DependencyCheck/dependency-check-cli/ for installation explanation - name: Install and run dependency scan diff --git a/.github/workflows/docker-build-smoke-tests-fake-backend.yml b/.github/workflows/docker-build-smoke-tests-fake-backend.yml index f1b863528a..6226dbac5d 100644 --- a/.github/workflows/docker-build-smoke-tests-fake-backend.yml +++ b/.github/workflows/docker-build-smoke-tests-fake-backend.yml @@ -49,8 +49,7 @@ jobs: with: registry: public.ecr.aws - - name: Setup Gradle - uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #4.4.3 - - name: Build and push docker image - run: ./gradlew :smoke-tests:fakebackend:jib + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 + with: + arguments: :smoke-tests:fakebackend:jib diff --git a/.github/workflows/e2e-tests-app-with-java-agent.yml b/.github/workflows/e2e-tests-app-with-java-agent.yml index 8d1f8200aa..0fc0045c62 100644 --- a/.github/workflows/e2e-tests-app-with-java-agent.yml +++ b/.github/workflows/e2e-tests-app-with-java-agent.yml @@ -64,11 +64,10 @@ jobs: with: registry: public.ecr.aws - - name: Setup Gradle - uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #4.4.3 - - name: Build and push agent and testing docker images with Gradle - run: ./gradlew jib + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 + with: + arguments: jib env: COMMIT_HASH: ${{ inputs.image_tag }} diff --git a/.github/workflows/e2e-tests-with-operator.yml 
b/.github/workflows/e2e-tests-with-operator.yml index 9b73ff8c57..3c4ebe301a 100644 --- a/.github/workflows/e2e-tests-with-operator.yml +++ b/.github/workflows/e2e-tests-with-operator.yml @@ -71,7 +71,7 @@ jobs: registry: public.ecr.aws - name: Build and push Sample-Apps without Auto-Instrumentation Agent - uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 - name: Build and push Sample-Apps with Auto-Instrumentation Agent run: jibBuildWithoutAgent diff --git a/.github/workflows/main-build.yml b/.github/workflows/main-build.yml index 66e977d5b4..622cba16e3 100644 --- a/.github/workflows/main-build.yml +++ b/.github/workflows/main-build.yml @@ -90,11 +90,10 @@ jobs: with: registry: public.ecr.aws - - name: Setup Gradle - uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #4.4.3 - - name: Build snapshot with Gradle - run: ./gradlew build integrationTests snapshot --stacktrace -PenableCoverage=true -PlocalDocker=true + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 + with: + arguments: build integrationTests snapshot --stacktrace -PenableCoverage=true -PlocalDocker=true env: PUBLISH_TOKEN_USERNAME: ${{ secrets.PUBLISH_TOKEN_USERNAME }} PUBLISH_TOKEN_PASSWORD: ${{ secrets.PUBLISH_TOKEN_PASSWORD }} @@ -223,11 +222,10 @@ jobs: - name: Pull base image of Contract Tests Sample Apps run: docker pull public.ecr.aws/docker/library/amazoncorretto:23-alpine - - name: Setup Gradle - uses: gradle/actions/setup-gradle@ed408507eac070d1f99cc633dbcf757c94c7933a #v4.4.3 - - name: Build snapshot with Gradle - run: ./gradlew contractTests -PlocalDocker=true + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 + with: + arguments: contractTests -PlocalDocker=true application-signals-lambda-layer-build: runs-on: ubuntu-latest diff --git a/.github/workflows/nightly-upstream-snapshot-build.yml 
b/.github/workflows/nightly-upstream-snapshot-build.yml index dc3c66ad45..1c845748f1 100644 --- a/.github/workflows/nightly-upstream-snapshot-build.yml +++ b/.github/workflows/nightly-upstream-snapshot-build.yml @@ -23,7 +23,7 @@ jobs: image_name: ${{ steps.imageOutput.outputs.imageName }} steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: fetch-depth: 0 @@ -34,7 +34,7 @@ jobs: # cache local patch outputs - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -49,18 +49,18 @@ jobs: - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws - name: Build snapshot with Gradle - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 with: arguments: build --stacktrace -PenableCoverage=true -PtestUpstreamSnapshots=true env: @@ -95,7 +95,7 @@ jobs: snapshot-ecr-role: ${{ secrets.JAVA_INSTRUMENTATION_SNAPSHOT_ECR }} - name: Upload to GitHub Actions - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2 with: name: aws-opentelemetry-agent.jar path: otelagent/build/libs/aws-opentelemetry-agent-*.jar @@ -129,7 +129,7 @@ jobs: runs-on: ubuntu-latest needs: build steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: 
fetch-depth: 0 - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 @@ -139,13 +139,13 @@ jobs: - uses: gradle/actions/wrapper-validation@ed408507eac070d1f99cc633dbcf757c94c7933a # v4.4.3 - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws @@ -153,7 +153,7 @@ jobs: run: docker pull public.ecr.aws/docker/library/amazoncorretto:23-alpine - name: Build snapshot with Gradle - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 with: arguments: contractTests -PtestUpstreamSnapshots=true -PlocalDocker=true diff --git a/.github/workflows/patch-release-build.yml b/.github/workflows/patch-release-build.yml index 66c066ca9e..47a5b0c8e4 100644 --- a/.github/workflows/patch-release-build.yml +++ b/.github/workflows/patch-release-build.yml @@ -37,14 +37,14 @@ jobs: name: Check out release branch # Will fail if there is no release branch yet or succeed otherwise continue-on-error: true - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: ref: ${{ steps.parse-release-branch.outputs.release-branch-name }} - id: checkout-release-tag name: Check out release tag # If there is already a release branch, the previous step succeeds and we don't run this or the next one. 
if: ${{ steps.checkout-release-branch.outcome == 'failure' }} - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: ref: ${{ steps.parse-release-branch.outputs.release-tag-name }} - name: Create release branch @@ -57,7 +57,7 @@ jobs: needs: prepare-release-branch steps: - name: Checkout release branch - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: ref: ${{ needs.prepare-release-branch.outputs.release-branch-name }} @@ -66,12 +66,12 @@ jobs: java-version-file: .java-version distribution: 'temurin' - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws @@ -88,33 +88,32 @@ jobs: # Trim whitespaces and cherrypick echo $word | sed 's/ *$//g' | sed 's/^ *//g' | git cherry-pick --stdin done - - name: Build release with Gradle - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 with: arguments: build integrationTests -PlocalDocker=true -Prelease.version=${{ github.event.inputs.version }} --stacktrace - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN_RELEASE }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws - name: Set up QEMU - uses: docker/setup-qemu-action@v3 + uses: 
docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 #3.6.0 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 #v3.11.1 with: driver-opts: image=moby/buildkit:v0.15.1 - name: Build image for testing - uses: docker/build-push-action@v6 + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 #v6.18.0 with: push: false build-args: "ADOT_JAVA_VERSION=${{ github.event.inputs.version }}" @@ -128,7 +127,7 @@ jobs: run: .github/scripts/test-adot-javaagent-image.sh "${{ env.TEST_TAG }}" "${{ github.event.inputs.version }}" - name: Build and push image - uses: docker/build-push-action@v6 + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 #v6.18.0 with: push: true build-args: "ADOT_JAVA_VERSION=${{ github.event.inputs.version }}" @@ -138,7 +137,7 @@ jobs: public.ecr.aws/aws-observability/adot-autoinstrumentation-java:v${{ github.event.inputs.version }} - name: Build and Publish release with Gradle - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 with: arguments: build final closeAndReleaseSonatypeStagingRepository -Prelease.version=${{ github.event.inputs.version }} --stacktrace env: diff --git a/.github/workflows/post-release-version-bump.yml b/.github/workflows/post-release-version-bump.yml index 93835b88de..b559262338 100644 --- a/.github/workflows/post-release-version-bump.yml +++ b/.github/workflows/post-release-version-bump.yml @@ -24,7 +24,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout main - uses: actions/checkout@v2 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: ref: main fetch-depth: 0 @@ -63,13 +63,13 @@ jobs: needs: check-version steps: - name: Configure AWS credentials for BOT secrets - uses: aws-actions/configure-aws-credentials@v4 + uses: 
aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ROLE_ARN_SECRETS_MANAGER }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Get Bot secrets - uses: aws-actions/aws-secretsmanager-get-secrets@v1 + uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802 #v2.0.10 id: bot_secrets with: secret-ids: | @@ -77,7 +77,7 @@ jobs: parse-json-secrets: true - name: Setup Git - uses: actions/checkout@v2 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 token: ${{ env.BOT_TOKEN_GITHUB_RW_PATOKEN }} diff --git a/.github/workflows/pr-build.yml b/.github/workflows/pr-build.yml index 822c20c710..7d4a9545eb 100644 --- a/.github/workflows/pr-build.yml +++ b/.github/workflows/pr-build.yml @@ -17,7 +17,7 @@ jobs: changelog-check: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: fetch-depth: 0 @@ -54,7 +54,7 @@ jobs: name: Test patches applied to dependencies runs-on: aws-otel-java-instrumentation_ubuntu-latest_32-core steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: @@ -62,13 +62,13 @@ jobs: distribution: temurin # vaadin 14 tests fail with node 18 - name: Set up Node - uses: actions/setup-node@v4 + uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 with: node-version: 16 # vaadin tests use pnpm - name: Cache pnpm modules - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 with: path: ~/.pnpm-store key: ${{ runner.os }}-test-cache-pnpm-modules @@ -96,7 +96,7 @@ jobs: # https://github.com/open-telemetry/opentelemetry-java/issues/4560 - os: ${{ startsWith(github.event.pull_request.base.ref, 'release/v') && 'windows-latest' || '' 
}} steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: @@ -114,7 +114,7 @@ jobs: # cache local patch outputs - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 #v4.2.4 with: path: | ~/.m2/repository/io/opentelemetry/ @@ -125,7 +125,7 @@ jobs: if: ${{ matrix.os != 'windows-latest' }} # Skip patch on windows as it is not possible to build opentelemetry-java on windows - name: Build with Gradle with Integration tests - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 if: ${{ matrix.os == 'ubuntu-latest' }} with: arguments: build integrationTests --stacktrace -PenableCoverage=true -PlocalDocker=true @@ -145,7 +145,7 @@ jobs: run: docker pull public.ecr.aws/docker/library/amazoncorretto:23-alpine - name: Run contract tests - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 if: ${{ matrix.os == 'ubuntu-latest' }} with: arguments: contractTests -PlocalDocker=true -i @@ -163,17 +163,17 @@ jobs: echo "ADOT_JAVA_VERSION=$(./gradlew printVersion -q )" >> $GITHUB_ENV - name: Set up QEMU - uses: docker/setup-qemu-action@v3 + uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 #3.6.0 if: ${{ matrix.os == 'ubuntu-latest' }} - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 #v3.11.1 with: driver-opts: image=moby/buildkit:v0.15.1 if: ${{ matrix.os == 'ubuntu-latest' }} - name: Build image for testing - uses: docker/build-push-action@v6 + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 #v6.18.0 if: ${{ matrix.os == 'ubuntu-latest' }} with: push: false @@ -197,17 +197,17 @@ jobs: run: 
.github/scripts/test-adot-javaagent-image.sh "${{ env.TEST_TAG }}" "${{ env.ADOT_JAVA_VERSION }}" - name: Build with Gradle - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 if: ${{ matrix.os != 'ubuntu-latest' && (hashFiles('.github/patches/opentelemetry-java*.patch') == '' || matrix.os != 'windows-latest' ) }} # build on windows as well unless a patch exists with: arguments: build --stacktrace -PenableCoverage=true - - uses: codecov/codecov-action@v5 + - uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 #v5.5.1 build-lambda: runs-on: ubuntu-latest steps: - name: Checkout Repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 - name: Setup Java uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 diff --git a/.github/workflows/pre-release-prepare.yml b/.github/workflows/pre-release-prepare.yml index 3459ef288d..4ef8a0ed0a 100644 --- a/.github/workflows/pre-release-prepare.yml +++ b/.github/workflows/pre-release-prepare.yml @@ -25,13 +25,13 @@ jobs: runs-on: ubuntu-latest steps: - name: Configure AWS credentials for BOT secrets - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ROLE_ARN_SECRETS_MANAGER }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Get Bot secrets - uses: aws-actions/aws-secretsmanager-get-secrets@v1 + uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802 #v2.0.10 id: bot_secrets with: secret-ids: | @@ -39,7 +39,7 @@ jobs: parse-json-secrets: true - name: Checkout main branch - uses: actions/checkout@v3 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: ref: 'main' token: ${{ env.BOT_TOKEN_GITHUB_RW_PATOKEN }} diff --git a/.github/workflows/publish-status.yml 
b/.github/workflows/publish-status.yml index 1efeb9c04d..5159e2bddf 100644 --- a/.github/workflows/publish-status.yml +++ b/.github/workflows/publish-status.yml @@ -37,7 +37,7 @@ jobs: contents: read steps: - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.roleArn }} aws-region: ${{ inputs.region }} diff --git a/.github/workflows/release-build.yml b/.github/workflows/release-build.yml index 6a95f7b2df..54b9d36e77 100644 --- a/.github/workflows/release-build.yml +++ b/.github/workflows/release-build.yml @@ -24,7 +24,7 @@ jobs: environment: Release runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 - name: Check main build status env: @@ -56,53 +56,53 @@ jobs: gpg_password: ${{ secrets.GPG_PASSPHRASE }} - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} aws-region: ${{ env.AWS_PUBLIC_ECR_REGION }} - name: Log in to AWS ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws - name: Build release with Gradle - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 with: arguments: build integrationTests -PlocalDocker=true -Prelease.version=${{ github.event.inputs.version }} --stacktrace - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN_RELEASE }} aws-region: ${{ env.AWS_PUBLIC_ECR_REGION }} - name: Log in to AWS ECR 
- uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: public.ecr.aws - name: Configure AWS Credentials for Private ECR - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN_RELEASE }} aws-region: ${{ env.AWS_PRIVATE_ECR_REGION }} - name: Log in to AWS private ECR - uses: docker/login-action@v3 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 #v3.5.0 with: registry: ${{ env.PRIVATE_REGISTRY }} - name: Set up QEMU - uses: docker/setup-qemu-action@v3 + uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 #3.6.0 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 #v3.11.1 with: driver-opts: image=moby/buildkit:v0.15.1 - name: Build image for testing - uses: docker/build-push-action@v6 + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 #v6.18.0 with: push: false build-args: "ADOT_JAVA_VERSION=${{ github.event.inputs.version }}" @@ -116,7 +116,7 @@ jobs: run: .github/scripts/test-adot-javaagent-image.sh "${{ env.TEST_TAG }}" "${{ github.event.inputs.version }}" - name: Build and push image - uses: docker/build-push-action@v6 + uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 #v6.18.0 with: push: true build-args: "ADOT_JAVA_VERSION=${{ github.event.inputs.version }}" @@ -127,7 +127,7 @@ jobs: ${{ env.PRIVATE_REPOSITORY }}:v${{ github.event.inputs.version }} - name: Build and Publish release with Gradle - uses: gradle/gradle-build-action@v3 + uses: gradle/actions/setup-gradle@d9c87d481d55275bb5441eef3fe0e46805f9ef70 #v3.5.0 with: arguments: build final closeAndReleaseSonatypeStagingRepository -Prelease.version=${{ github.event.inputs.version }} --stacktrace env: diff --git 
a/.github/workflows/release-lambda.yml b/.github/workflows/release-lambda.yml index 6394e1dfd1..9bc4c73011 100644 --- a/.github/workflows/release-lambda.yml +++ b/.github/workflows/release-lambda.yml @@ -41,7 +41,7 @@ jobs: echo "aws_regions_json=${MATRIX}" >> $GITHUB_OUTPUT - name: Checkout Repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 with: @@ -54,7 +54,7 @@ jobs: ./build-layer.sh - name: Upload layer - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2 with: name: aws-opentelemetry-java-layer.zip path: lambda-layer/build/distributions/aws-opentelemetry-java-layer.zip @@ -88,7 +88,7 @@ jobs: SECRET_KEY=${SECRET_KEY//-/_} echo "SECRET_KEY=${SECRET_KEY}" >> $GITHUB_ENV - - uses: aws-actions/configure-aws-credentials@v4.0.2 + - uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: role-to-assume: ${{ secrets[env.SECRET_KEY] }} role-duration-seconds: 1200 @@ -99,7 +99,7 @@ jobs: echo BUCKET_NAME=java-lambda-layer-${{ github.run_id }}-${{ matrix.aws_region }} | tee --append $GITHUB_ENV - name: download layer.zip - uses: actions/download-artifact@v5 + uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 #v5.0.0 with: name: aws-opentelemetry-java-layer.zip @@ -140,7 +140,7 @@ jobs: - name: upload layer arn artifact if: ${{ success() }} - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 #v4.6.2 with: name: ${{ env.LAYER_NAME }}-${{ matrix.aws_region }} path: ${{ env.LAYER_NAME }}/${{ matrix.aws_region }} @@ -155,10 +155,10 @@ jobs: needs: publish-prod steps: - name: Checkout Repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v5 - - uses: hashicorp/setup-terraform@v2 + uses: 
actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + - uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd #v3.1.2 - name: download layerARNs - uses: actions/download-artifact@v5 + uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 #v5.0.0 with: pattern: ${{ env.LAYER_NAME }}-* path: ${{ env.LAYER_NAME }} @@ -207,7 +207,7 @@ jobs: echo "}" >> ../layer_cdk cat ../layer_cdk - name: download aws-opentelemetry-java-layer.zip - uses: actions/download-artifact@v5 + uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 #v5.0.0 with: name: aws-opentelemetry-java-layer.zip - name: rename to layer.zip diff --git a/.github/workflows/release-udp-exporter.yml b/.github/workflows/release-udp-exporter.yml index 398a34a604..262683289d 100644 --- a/.github/workflows/release-udp-exporter.yml +++ b/.github/workflows/release-udp-exporter.yml @@ -26,7 +26,7 @@ jobs: needs: validate-udp-exporter-e2e-test steps: - name: Checkout Repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 - name: Set up Java uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 diff --git a/.github/workflows/soak-testing.yml b/.github/workflows/soak-testing.yml index 3fd1173c85..2dcd07d709 100644 --- a/.github/workflows/soak-testing.yml +++ b/.github/workflows/soak-testing.yml @@ -63,7 +63,7 @@ jobs: run: | echo "TEST_DURATION_MINUTES=${{ github.event.inputs.test_duration_minutes || env.DEFAULT_TEST_DURATION_MINUTES }}" | tee --append $GITHUB_ENV; - name: Clone This Repo @ ${{ env.TARGET_SHA }} - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: ref: ${{ env.TARGET_SHA }} @@ -98,7 +98,7 @@ jobs: # MARK: - Run Performance Tests - name: Configure AWS Credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: 
aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 # v5.0.0 with: role-to-assume: ${{ secrets.AWS_ASSUME_ROLE_ARN }} role-duration-seconds: 21600 # 6 Hours @@ -110,7 +110,7 @@ jobs: aws ecr-public get-login-password | docker login --username AWS --password-stdin public.ecr.aws - name: Build Sample App locally directly to the Docker daemon - uses: burrunan/gradle-cache-action@v3 + uses: burrunan/gradle-cache-action@4a07779efc8120348ea6dfd35314bc30a586eb0f #v3.0.1 with: arguments: jibDockerBuild env: @@ -210,7 +210,7 @@ jobs: git checkout main; [[ $HAS_RESULTS_ALREADY == true ]] - name: Graph and Report Performance Test Averages result - uses: benchmark-action/github-action-benchmark@v1 + uses: benchmark-action/github-action-benchmark@4bdcce38c94cec68da58d012ac24b7b1155efe8b #v1.20.7 continue-on-error: true id: check-failure-after-performance-tests with: @@ -230,7 +230,7 @@ jobs: gh-pages-branch: gh-pages benchmark-data-dir-path: soak-tests/per-commit-overall-results - name: Publish Issue if failed DURING Performance Tests - uses: JasonEtco/create-an-issue@v2 + uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2 if: ${{ github.event_name == 'schedule' && steps.check-failure-during-performance-tests.outcome == 'failure' }} env: @@ -241,7 +241,7 @@ jobs: filename: .github/auto-issue-templates/failure-during-soak_tests.md update_existing: true - name: Publish Issue if failed AFTER Performance Tests - uses: JasonEtco/create-an-issue@v2 + uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 #v2.9.2 if: ${{ github.event_name == 'schedule' && steps.check-failure-after-performance-tests.outcome == 'failure' }} env: diff --git a/.github/workflows/stale-bot.yml b/.github/workflows/stale-bot.yml index 2104ad0b4f..49ddc47c00 100644 --- a/.github/workflows/stale-bot.yml +++ b/.github/workflows/stale-bot.yml @@ -22,7 +22,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Mark the issues/pr - uses: 
actions/stale@v9 + uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f #10.0.0 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} #Github workflow will add a temporary token when executing the workflow with: diff --git a/.github/workflows/udp-exporter-e2e-test.yml b/.github/workflows/udp-exporter-e2e-test.yml index ec227e689a..8a8b3aaa1b 100644 --- a/.github/workflows/udp-exporter-e2e-test.yml +++ b/.github/workflows/udp-exporter-e2e-test.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Repo @ SHA - ${{ github.sha }} - uses: actions/checkout@v5 + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - name: Set up Java uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0 @@ -23,7 +23,7 @@ jobs: cache: 'gradle' - name: Configure AWS credentials for Testing Tracing - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 # v5.0.0 with: role-to-assume: ${{ secrets.XRAY_UDP_EXPORTER_TEST_ROLE }} aws-region: 'us-east-1' From 9b6fcc9d0d5c25e85e50084c3ebcab15b2f01164 Mon Sep 17 00:00:00 2001 From: Thomas Pierce Date: Tue, 23 Sep 2025 16:32:33 -0700 Subject: [PATCH 69/83] feat: prevent versioned 3P GitHub actions in PR builds (#1212) Add validation step to require commit SHAs instead of version tags for third-party GitHub actions in workflow files. Repo config `Require actions to be pinned to a full-length commit SHA` will protect against this if we missed any others. ### Testing done * See: https://github.com/aws-observability/aws-otel-python-instrumentation/pull/475 By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. 
--- .github/workflows/pr-build.yml | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pr-build.yml b/.github/workflows/pr-build.yml index 7d4a9545eb..2c390d9ba2 100644 --- a/.github/workflows/pr-build.yml +++ b/.github/workflows/pr-build.yml @@ -14,7 +14,7 @@ env: TEST_TAG: public.ecr.aws/aws-observability/adot-autoinstrumentation-java:test-v2 jobs: - changelog-check: + static-code-checks: runs-on: ubuntu-latest steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 @@ -22,6 +22,7 @@ jobs: fetch-depth: 0 - name: Check CHANGELOG + if: always() run: | # Check if PR is from workflows bot or dependabot if [[ "${{ github.event.pull_request.user.login }}" == "aws-application-signals-bot" ]]; then @@ -50,6 +51,24 @@ jobs: echo "It looks like you didn't add an entry to CHANGELOG.md. If this change affects the SDK behavior, please update CHANGELOG.md and link this PR in your entry. If this PR does not need a CHANGELOG entry, you can add the 'Skip Changelog' label to this PR." exit 1 + - name: Check for versioned GitHub actions + if: always() + run: | + # Get changed GitHub workflow/action files + CHANGED_FILES=$(git diff --name-only origin/${{ github.base_ref }}..HEAD | grep -E "^\.github/(workflows|actions)/.*\.ya?ml$" || true) + + if [ -n "$CHANGED_FILES" ]; then + # Check for any versioned actions, excluding comments and this validation script + VIOLATIONS=$(grep -Hn "uses:.*@v" $CHANGED_FILES | grep -v "grep.*uses:.*@v" | grep -v "#.*@v" || true) + if [ -n "$VIOLATIONS" ]; then + echo "Found versioned GitHub actions. 
Use commit SHAs instead:" + echo "$VIOLATIONS" + exit 1 + fi + fi + + echo "No versioned actions found in changed files" + testpatch: name: Test patches applied to dependencies runs-on: aws-otel-java-instrumentation_ubuntu-latest_32-core @@ -221,7 +240,7 @@ jobs: all-pr-checks-pass: runs-on: ubuntu-latest - needs: [changelog-check, testpatch, build, build-lambda] + needs: [static-code-checks, testpatch, build, build-lambda] if: always() steps: - name: Checkout to get workflow file From 383f29c994d0d5c121b28e79927d02c18ed8d3da Mon Sep 17 00:00:00 2001 From: Eric Zhang Date: Tue, 23 Sep 2025 19:09:45 -0700 Subject: [PATCH 70/83] add script to bump otel dependencies --- scripts/update_dependencies.py | 126 +++++++++++++++++++++++++++++++++ 1 file changed, 126 insertions(+) create mode 100644 scripts/update_dependencies.py diff --git a/scripts/update_dependencies.py b/scripts/update_dependencies.py new file mode 100644 index 0000000000..bc3c4864bf --- /dev/null +++ b/scripts/update_dependencies.py @@ -0,0 +1,126 @@ +#!/usr/bin/env python3 + +import requests +import re +import sys + +def get_latest_instrumentation_version(): + """Get the latest version of opentelemetry-java-instrumentation from GitHub releases.""" + try: + response = requests.get( + 'https://api.github.com/repos/open-telemetry/opentelemetry-java-instrumentation/releases/latest', + timeout=30 + ) + response.raise_for_status() + + release_data = response.json() + tag_name = release_data['tag_name'] + + version = tag_name.lstrip('v') + return version + + except requests.RequestException as request_error: + print(f"Warning: Could not get latest instrumentation version: {request_error}") + return None + +def get_latest_maven_version(group_id, artifact_id): + """Get the latest version of a Maven artifact from Maven Central.""" + try: + response = requests.get( + f'https://search.maven.org/solrsearch/select?q=g:{group_id}+AND+a:{artifact_id}&rows=1&wt=json', + timeout=30 + ) + response.raise_for_status() + + 
data = response.json() + docs = data.get('response', {}).get('docs', []) + + if docs: + return docs[0]['latestVersion'] + else: + print(f"Warning: No versions found for {group_id}:{artifact_id}") + return None + + except requests.RequestException as request_error: + print(f"Warning: Could not get latest version for {group_id}:{artifact_id}: {request_error}") + return None + +def update_gradle_file(file_path): + """Update OpenTelemetry versions in build.gradle.kts.""" + try: + with open(file_path, 'r', encoding='utf-8') as input_file: + content = input_file.read() + + original_content = content + updated = False + + latest_instrumentation_version = get_latest_instrumentation_version() + if latest_instrumentation_version: + # Update otelVersion + otel_version_pattern = r'val otelVersion = "[^"]*"' + otel_version_replacement = f'val otelVersion = "{latest_instrumentation_version}"' + if re.search(otel_version_pattern, content): + new_content = re.sub(otel_version_pattern, otel_version_replacement, content) + if new_content != content: + content = new_content + updated = True + print(f"Updated otelVersion to {latest_instrumentation_version}") + + # Update otelSnapshotVersion (typically next minor version) + version_parts = latest_instrumentation_version.split('.') + if len(version_parts) >= 2: + next_minor = f"{version_parts[0]}.{int(version_parts[1]) + 1}.0" + otel_snapshot_pattern = r'val otelSnapshotVersion = "[^"]*"' + otel_snapshot_replacement = f'val otelSnapshotVersion = "{next_minor}"' + if re.search(otel_snapshot_pattern, content): + new_content = re.sub(otel_snapshot_pattern, otel_snapshot_replacement, content) + if new_content != content: + content = new_content + updated = True + print(f"Updated otelSnapshotVersion to {next_minor}") + + # Update hardcoded OpenTelemetry versions in dependencyLists + opentelemetry_packages = [ + ('io.opentelemetry.contrib', 'opentelemetry-aws-xray'), + ('io.opentelemetry.contrib', 'opentelemetry-aws-resources'), + 
('io.opentelemetry', 'opentelemetry-extension-aws'), + ('io.opentelemetry.proto', 'opentelemetry-proto'), + ] + + for group_id, artifact_id in opentelemetry_packages: + latest_version = get_latest_maven_version(group_id, artifact_id) + if latest_version: + # Pattern to match the dependency line + pattern = rf'"{re.escape(group_id)}:{re.escape(artifact_id)}:[^"]*"' + replacement = f'"{group_id}:{artifact_id}:{latest_version}"' + + if re.search(pattern, content): + new_content = re.sub(pattern, replacement, content) + if new_content != content: + content = new_content + updated = True + print(f"Updated {group_id}:{artifact_id} to {latest_version}") + + if updated: + with open(file_path, 'w', encoding='utf-8') as output_file: + output_file.write(content) + print("Dependencies updated successfully") + return True + else: + print("No OpenTelemetry dependencies needed updating") + return False + + except (OSError, IOError) as file_error: + print(f"Error updating dependencies: {file_error}") + sys.exit(1) + +def main(): + gradle_file_path = 'dependencyManagement/build.gradle.kts' + + updated = update_gradle_file(gradle_file_path) + + if not updated: + print("No updates were made") + +if __name__ == '__main__': + main() From c4395a7d45a15b9de1db5b5c5c12175813e8b9eb Mon Sep 17 00:00:00 2001 From: Eric Zhang Date: Tue, 23 Sep 2025 19:12:06 -0700 Subject: [PATCH 71/83] add nightly build workflow --- .github/workflows/nightly-build.yml | 70 +++++++++++++++++++++++++++++ 1 file changed, 70 insertions(+) create mode 100644 .github/workflows/nightly-build.yml diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml new file mode 100644 index 0000000000..f4677500e5 --- /dev/null +++ b/.github/workflows/nightly-build.yml @@ -0,0 +1,70 @@ +name: Nightly Upstream Snapshot Build + +on: + schedule: + - cron: "21 3 * * *" + workflow_dispatch: + +env: + BRANCH_NAME: nightly-dependency-updates + +jobs: + update-and-create-pr: + runs-on: ubuntu-latest + 
outputs: + has_changes: ${{ steps.check_changes.outputs.has_changes }} + + steps: + - name: Checkout repository + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #5.0.0 + with: + fetch-depth: 0 + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Check if nightly branch already exists + run: | + if git ls-remote --exit-code --heads origin "$BRANCH_NAME"; then + echo "Branch $BRANCH_NAME already exists. Skipping run to avoid conflicts." + echo "Please merge or close the existing PR before the next nightly run." + exit 1 + fi + + - name: Configure git and create branch + run: | + git config --local user.email "action@github.com" + git config --local user.name "GitHub Action" + git checkout -b "$BRANCH_NAME" + + - name: Set up Python + uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c #v6.0.0 + with: + python-version: '3.11' + + - name: Install Python dependencies + run: pip install requests + + - name: Update dependencies + run: python3 scripts/update_dependencies.py + + - name: Check for changes and create PR + id: check_changes + run: | + if git diff --quiet; then + echo "No dependency updates needed" + echo "has_changes=false" >> $GITHUB_OUTPUT + else + echo "Dependencies were updated" + echo "has_changes=true" >> $GITHUB_OUTPUT + + git add dependencyManagement/build.gradle.kts + git commit -m "chore: update OpenTelemetry dependencies to latest versions" + git push origin "$BRANCH_NAME" + + gh pr create \ + --title "Nightly dependency update: OpenTelemetry packages to latest versions" \ + --body "Automated update of OpenTelemetry dependencies to their latest available versions." 
\ + --base main \ + --head "$BRANCH_NAME" + fi + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From e12d79c9cb645d1bc8cee60ca9e571dea2bfbd31 Mon Sep 17 00:00:00 2001 From: Eric Zhang Date: Tue, 23 Sep 2025 19:13:06 -0700 Subject: [PATCH 72/83] update main build to run on nightly build branch --- .github/workflows/main-build.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/main-build.yml b/.github/workflows/main-build.yml index 622cba16e3..6618a3b2f5 100644 --- a/.github/workflows/main-build.yml +++ b/.github/workflows/main-build.yml @@ -4,6 +4,7 @@ on: branches: - main - "release/v*" + - nightly-dependency-updates workflow_dispatch: # be able to run the workflow on demand env: AWS_DEFAULT_REGION: us-east-1 @@ -268,7 +269,7 @@ jobs: name: "Publish Main Build Status" needs: [ build, e2e-test, contract-tests, application-signals-lambda-layer-build, application-signals-e2e-test ] runs-on: ubuntu-latest - if: always() + if: github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/') steps: - name: Configure AWS Credentials for emitting metrics uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 From a821678504e588da1057a1fcbeee442e5cef8997 Mon Sep 17 00:00:00 2001 From: Eric Zhang Date: Sun, 28 Sep 2025 18:41:09 -0700 Subject: [PATCH 73/83] update contrib dependencies together in script --- scripts/update_dependencies.py | 57 ++++++++++++++++++++++++++++++++-- 1 file changed, 54 insertions(+), 3 deletions(-) diff --git a/scripts/update_dependencies.py b/scripts/update_dependencies.py index bc3c4864bf..2d4f84b520 100644 --- a/scripts/update_dependencies.py +++ b/scripts/update_dependencies.py @@ -23,6 +23,37 @@ def get_latest_instrumentation_version(): print(f"Warning: Could not get latest instrumentation version: {request_error}") return None +def get_latest_contrib_version(): + """Get the latest version of opentelemetry-java-contrib from GitHub releases.""" + try: 
+ response = requests.get( + 'https://api.github.com/repos/open-telemetry/opentelemetry-java-contrib/releases', + timeout=30 + ) + response.raise_for_status() + + releases = response.json() + + # Find the latest stable release + for release in releases: + if release.get('prerelease', False): + continue # Skip pre-releases + + tag_name = release['tag_name'] + # Contrib releases are typically tagged as "v1.32.0" + version_match = re.match(r'^v?(\d+\.\d+\.\d+)$', tag_name) + if version_match: + version = version_match.group(1) + print(f"Found contrib version: {version}") + return version + + print("Warning: No stable contrib releases found") + return None + + except requests.RequestException as request_error: + print(f"Warning: Could not get latest contrib version: {request_error}") + return None + def get_latest_maven_version(group_id, artifact_id): """Get the latest version of a Maven artifact from Maven Central.""" try: @@ -79,15 +110,35 @@ def update_gradle_file(file_path): updated = True print(f"Updated otelSnapshotVersion to {next_minor}") - # Update hardcoded OpenTelemetry versions in dependencyLists - opentelemetry_packages = [ + # Get latest contrib version from GitHub + latest_contrib_version = get_latest_contrib_version() + + # Update contrib packages that are released together + contrib_packages = [ ('io.opentelemetry.contrib', 'opentelemetry-aws-xray'), ('io.opentelemetry.contrib', 'opentelemetry-aws-resources'), + ] + + for group_id, artifact_id in contrib_packages: + if latest_contrib_version: + # Pattern to match the dependency line + pattern = rf'"{re.escape(group_id)}:{re.escape(artifact_id)}:[^"]*"' + replacement = f'"{group_id}:{artifact_id}:{latest_contrib_version}"' + + if re.search(pattern, content): + new_content = re.sub(pattern, replacement, content) + if new_content != content: + content = new_content + updated = True + print(f"Updated {group_id}:{artifact_id} to {latest_contrib_version}") + + # Update remaining packages using Maven Central 
+ other_packages = [ ('io.opentelemetry', 'opentelemetry-extension-aws'), ('io.opentelemetry.proto', 'opentelemetry-proto'), ] - for group_id, artifact_id in opentelemetry_packages: + for group_id, artifact_id in other_packages: latest_version = get_latest_maven_version(group_id, artifact_id) if latest_version: # Pattern to match the dependency line From ea7f37204c35a4fae6c9a6a297d3af8b32b1a913 Mon Sep 17 00:00:00 2001 From: Eric Zhang Date: Sun, 28 Sep 2025 18:55:30 -0700 Subject: [PATCH 74/83] add logic to link releases with breaking changes --- .github/workflows/nightly-build.yml | 25 +++++- scripts/find_breaking_changes.py | 118 ++++++++++++++++++++++++++++ 2 files changed, 141 insertions(+), 2 deletions(-) create mode 100644 scripts/find_breaking_changes.py diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml index f4677500e5..25d5909a82 100644 --- a/.github/workflows/nightly-build.yml +++ b/.github/workflows/nightly-build.yml @@ -41,7 +41,14 @@ jobs: python-version: '3.11' - name: Install Python dependencies - run: pip install requests + run: pip install requests packaging + + - name: Generate breaking changes summary + id: breaking_changes + run: | + echo "BREAKING_CHANGES<<EOF" >> $GITHUB_OUTPUT + python3 scripts/find_breaking_changes.py + echo "EOF" >> $GITHUB_OUTPUT - name: Update dependencies run: python3 scripts/update_dependencies.py @@ -60,11 +67,25 @@ jobs: git commit -m "chore: update OpenTelemetry dependencies to latest versions" git push origin "$BRANCH_NAME" + - name: Create or update PR + if: steps.check_changes.outputs.has_changes == 'true' + run: | + PR_BODY="Automated update of OpenTelemetry dependencies to their latest available versions. + + **Upstream releases with breaking changes:** + ${{ steps.breaking_changes.outputs.BREAKING_CHANGES }}" + + if gh pr view "$BRANCH_NAME" --json state --jq '.state' 2>/dev/null | grep -q "OPEN"; then + echo "Open PR already exists, updating description..." 
+ gh pr edit "$BRANCH_NAME" --body "$PR_BODY" + else + echo "Creating new PR..." gh pr create \ --title "Nightly dependency update: OpenTelemetry packages to latest versions" \ - --body "Automated update of OpenTelemetry dependencies to their latest available versions." \ + --body "$PR_BODY" \ --base main \ --head "$BRANCH_NAME" fi + fi env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/scripts/find_breaking_changes.py b/scripts/find_breaking_changes.py new file mode 100644 index 0000000000..59e0cf4541 --- /dev/null +++ b/scripts/find_breaking_changes.py @@ -0,0 +1,118 @@ +#!/usr/bin/env python3 + +import re +import requests +import sys +from packaging import version + + +def get_current_versions(): + """Get current versions from build.gradle.kts.""" + try: + with open("dependencyManagement/build.gradle.kts", "r", encoding="utf-8") as file: + content = file.read() + + # Extract otelVersion + otel_version_match = re.search(r'val otelVersion = "([^"]*)"', content) + current_instrumentation_version = otel_version_match.group(1) if otel_version_match else None + + return current_instrumentation_version + + except (OSError, IOError) as error: + print(f"Error reading current versions: {error}") + return None + + +def get_releases_with_breaking_changes(repo, current_version, new_version): + """Get releases between current and new version that mention breaking changes.""" + try: + response = requests.get(f"https://api.github.com/repos/open-telemetry/{repo}/releases", timeout=30) + response.raise_for_status() + releases = response.json() + + breaking_releases = [] + + for release in releases: + try: + tag_name = release["tag_name"] + release_version = tag_name.lstrip("v") + + # Check if this release is between current and new version + if ( + version.parse(current_version) + < version.parse(release_version) + <= version.parse(new_version) + ): + + # Check if release notes have breaking changes as headers + body = release.get("body", "") + breaking_header_pattern = 
r'^#+.*breaking changes' + if re.search(breaking_header_pattern, body, re.IGNORECASE | re.MULTILINE): + breaking_releases.append( + { + "version": release_version, + "name": release["name"], + "url": release["html_url"], + "body": release.get("body", ""), + } + ) + except (ValueError, KeyError): + # Skip releases with invalid version formats or missing data + continue + + return breaking_releases + + except requests.RequestException as request_error: + print(f"Warning: Could not get releases for {repo}: {request_error}") + return [] + + +def main(): + current_instrumentation_version = get_current_versions() + + if not current_instrumentation_version: + print("Could not determine current versions") + sys.exit(1) + + # Get new versions from the update script + sys.path.append('scripts') + from update_dependencies import get_latest_instrumentation_version, get_latest_contrib_version + + new_instrumentation_version = get_latest_instrumentation_version() + new_contrib_version = get_latest_contrib_version() + + if not new_instrumentation_version: + print("Could not determine new versions") + sys.exit(1) + + print("Checking for breaking changes:") + print(f"Instrumentation: {current_instrumentation_version} → {new_instrumentation_version}") + if new_contrib_version: + print(f"Contrib: → {new_contrib_version}") + + # Check instrumentation repo for breaking changes + instrumentation_breaking = get_releases_with_breaking_changes( + "opentelemetry-java-instrumentation", current_instrumentation_version, new_instrumentation_version + ) + + # Generate breaking changes summary + breaking_changes_summary = [] + + if instrumentation_breaking: + breaking_changes_summary.append("**Breaking changes found in opentelemetry-java-instrumentation:**") + for release in instrumentation_breaking: + breaking_changes_summary.append(f"- [{release['name']}]({release['url']})") + + # Always add contrib release link (no breaking changes detection needed) + if new_contrib_version: + 
breaking_changes_summary.append("**Check contrib releases:**") + breaking_changes_summary.append("- [opentelemetry-java-contrib releases](https://github.com/open-telemetry/opentelemetry-java-contrib/releases)") + + if breaking_changes_summary: + print("\n" + "\n".join(breaking_changes_summary)) + else: + print("\nNo breaking changes detected") + + +if __name__ == "__main__": + main() From 8cf6aac7cc314f1302764ea6f0ef7761a074d219 Mon Sep 17 00:00:00 2001 From: Eric Zhang Date: Sun, 28 Sep 2025 18:57:52 -0700 Subject: [PATCH 75/83] Revert "update main build to run on nightly build branch" This reverts commit e12d79c9cb645d1bc8cee60ca9e571dea2bfbd31. --- .github/workflows/main-build.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/main-build.yml b/.github/workflows/main-build.yml index 6618a3b2f5..622cba16e3 100644 --- a/.github/workflows/main-build.yml +++ b/.github/workflows/main-build.yml @@ -4,7 +4,6 @@ on: branches: - main - "release/v*" - - nightly-dependency-updates workflow_dispatch: # be able to run the workflow on demand env: AWS_DEFAULT_REGION: us-east-1 @@ -269,7 +268,7 @@ jobs: name: "Publish Main Build Status" needs: [ build, e2e-test, contract-tests, application-signals-lambda-layer-build, application-signals-e2e-test ] runs-on: ubuntu-latest - if: github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/') + if: always() steps: - name: Configure AWS Credentials for emitting metrics uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 From ed8294e32025a657df476142cd9d21e3873fd86d Mon Sep 17 00:00:00 2001 From: Eric Zhang Date: Sun, 28 Sep 2025 19:03:26 -0700 Subject: [PATCH 76/83] fix branching logic and add metric --- .github/workflows/nightly-build.yml | 51 ++++++++++++++++++++++------- 1 file changed, 40 insertions(+), 11 deletions(-) diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml index 25d5909a82..cff54cd3d5 
100644 --- a/.github/workflows/nightly-build.yml +++ b/.github/workflows/nightly-build.yml @@ -5,7 +5,13 @@ on: - cron: "21 3 * * *" workflow_dispatch: +permissions: + contents: write + pull-requests: write + id-token: write + env: + AWS_DEFAULT_REGION: us-east-1 BRANCH_NAME: nightly-dependency-updates jobs: @@ -21,19 +27,20 @@ jobs: fetch-depth: 0 token: ${{ secrets.GITHUB_TOKEN }} - - name: Check if nightly branch already exists - run: | - if git ls-remote --exit-code --heads origin "$BRANCH_NAME"; then - echo "Branch $BRANCH_NAME already exists. Skipping run to avoid conflicts." - echo "Please merge or close the existing PR before the next nightly run." - exit 1 - fi - - - name: Configure git and create branch + - name: Configure git run: | git config --local user.email "action@github.com" git config --local user.name "GitHub Action" - git checkout -b "$BRANCH_NAME" + + - name: Check out dependency update branch + run: | + if git ls-remote --exit-code --heads origin "$BRANCH_NAME"; then + echo "Branch $BRANCH_NAME already exists, checking out..." + git checkout "$BRANCH_NAME" + else + echo "Branch $BRANCH_NAME does not exist, creating new branch..." 
+ git checkout -b "$BRANCH_NAME" + fi - name: Set up Python uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c #v6.0.0 @@ -66,7 +73,8 @@ jobs: git add dependencyManagement/build.gradle.kts git commit -m "chore: update OpenTelemetry dependencies to latest versions" git push origin "$BRANCH_NAME" - + fi + - name: Create or update PR if: steps.check_changes.outputs.has_changes == 'true' run: | @@ -89,3 +97,24 @@ jobs: fi env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + publish-nightly-build-status: + name: "Publish Nightly Build Status" + needs: [update-and-create-pr] + runs-on: ubuntu-latest + if: always() + steps: + - name: Configure AWS Credentials for emitting metrics + uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 + with: + role-to-assume: ${{ secrets.MONITORING_ROLE_ARN }} + aws-region: ${{ env.AWS_DEFAULT_REGION }} + + - name: Publish nightly build status + run: | + # For now, just publish success since we don't have build validation yet + value="${{ needs.update-and-create-pr.result == 'success' && '0.0' || '1.0'}}" + aws cloudwatch put-metric-data --namespace 'ADOT/GitHubActions' \ + --metric-name Failure \ + --dimensions repository=${{ github.repository }},branch=${{ github.ref_name }},workflow=nightly_build \ + --value $value From edcdb158df651b758b6336e89f2cc4944ee3bbba Mon Sep 17 00:00:00 2001 From: Eric Zhang Date: Sun, 28 Sep 2025 19:08:08 -0700 Subject: [PATCH 77/83] add main build call and update metric --- .github/workflows/main-build.yml | 15 +++++++++++++++ .github/workflows/nightly-build.yml | 26 +++++++++++++++++++------- 2 files changed, 34 insertions(+), 7 deletions(-) diff --git a/.github/workflows/main-build.yml b/.github/workflows/main-build.yml index 622cba16e3..eba1156676 100644 --- a/.github/workflows/main-build.yml +++ b/.github/workflows/main-build.yml @@ -1,5 +1,15 @@ name: Java Agent Main Build on: + workflow_call: + inputs: + caller-workflow-name: + required: true + 
type: string + ref: + description: 'The branch, tag or SHA to checkout' + required: false + type: string + default: '' push: branches: - main @@ -24,6 +34,8 @@ jobs: runs-on: aws-otel-java-instrumentation_ubuntu-latest_32-core steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 + with: + ref: ${{ inputs.ref || github.sha }} - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: java-version-file: .java-version @@ -58,6 +70,7 @@ jobs: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 + ref: ${{ inputs.ref || github.sha }} - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: java-version-file: .java-version @@ -193,6 +206,7 @@ jobs: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 + ref: ${{ inputs.ref || github.sha }} - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: java-version: 23 @@ -233,6 +247,7 @@ jobs: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0 with: fetch-depth: 0 + ref: ${{ inputs.ref || github.sha }} - uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 #v5.0.0 with: java-version-file: .java-version diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml index cff54cd3d5..751152d322 100644 --- a/.github/workflows/nightly-build.yml +++ b/.github/workflows/nightly-build.yml @@ -98,9 +98,18 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + build-and-test: + needs: update-and-create-pr + if: needs.update-and-create-pr.outputs.has_changes == 'true' + uses: ./.github/workflows/main-build.yml + secrets: inherit + with: + caller-workflow-name: nightly-build + ref: nightly-dependency-updates + publish-nightly-build-status: name: "Publish Nightly Build Status" - needs: [update-and-create-pr] + needs: [update-and-create-pr, build-and-test] runs-on: ubuntu-latest if: always() 
steps: @@ -112,9 +121,12 @@ jobs: - name: Publish nightly build status run: | - # For now, just publish success since we don't have build validation yet - value="${{ needs.update-and-create-pr.result == 'success' && '0.0' || '1.0'}}" - aws cloudwatch put-metric-data --namespace 'ADOT/GitHubActions' \ - --metric-name Failure \ - --dimensions repository=${{ github.repository }},branch=${{ github.ref_name }},workflow=nightly_build \ - --value $value + if [[ "${{ needs.build-and-test.result }}" == "skipped" ]]; then + echo "Build was skipped (no changes), not publishing metric" + else + value="${{ needs.build-and-test.result == 'success' && '0.0' || '1.0'}}" + aws cloudwatch put-metric-data --namespace 'ADOT/GitHubActions' \ + --metric-name Failure \ + --dimensions repository=${{ github.repository }},branch=${{ github.ref_name }},workflow=nightly_build \ + --value $value + fi From 3393cf74166e11dd0425a377df1425c1986f7fe7 Mon Sep 17 00:00:00 2001 From: Eric Zhang Date: Sun, 28 Sep 2025 19:08:38 -0700 Subject: [PATCH 78/83] add test trigger to workflow --- .github/workflows/nightly-build.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml index 751152d322..215aa5b8c4 100644 --- a/.github/workflows/nightly-build.yml +++ b/.github/workflows/nightly-build.yml @@ -4,6 +4,9 @@ on: schedule: - cron: "21 3 * * *" workflow_dispatch: + push: + branches: + - zhaez/nightly-build permissions: contents: write From 27155e291911beefa3983b680390005c0e08a307 Mon Sep 17 00:00:00 2001 From: Eric Zhang Date: Sun, 28 Sep 2025 19:10:26 -0700 Subject: [PATCH 79/83] fix bug and job title --- .github/workflows/nightly-build.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml index 215aa5b8c4..cc2ab83806 100644 --- a/.github/workflows/nightly-build.yml +++ b/.github/workflows/nightly-build.yml @@ -63,7 +63,7 @@ jobs: - name: 
Update dependencies run: python3 scripts/update_dependencies.py - - name: Check for changes and create PR + - name: Check for changes and commit id: check_changes run: | if git diff --quiet; then @@ -97,7 +97,6 @@ jobs: --base main \ --head "$BRANCH_NAME" fi - fi env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From 735e6af9d0e550a1499bb72d4ce376aff55df062 Mon Sep 17 00:00:00 2001 From: Eric Zhang Date: Sun, 28 Sep 2025 19:11:44 -0700 Subject: [PATCH 80/83] fix credential for metric publishing --- .github/workflows/nightly-build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/nightly-build.yml b/.github/workflows/nightly-build.yml index cc2ab83806..6abd6d0d61 100644 --- a/.github/workflows/nightly-build.yml +++ b/.github/workflows/nightly-build.yml @@ -118,7 +118,7 @@ jobs: - name: Configure AWS Credentials for emitting metrics uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 #v5.0.0 with: - role-to-assume: ${{ secrets.MONITORING_ROLE_ARN }} + role-to-assume: ${{ secrets.METRICS_ROLE_ARN }} aws-region: ${{ env.AWS_DEFAULT_REGION }} - name: Publish nightly build status From cdad72c4ba9b0014649db42c9792b516604556f9 Mon Sep 17 00:00:00 2001 From: Eric Zhang Date: Sun, 28 Sep 2025 19:22:25 -0700 Subject: [PATCH 81/83] fix regex matching for breaking changes --- scripts/draft-changelog.sh | 28 ++++++++++++++++++++++++++++ scripts/find_breaking_changes.py | 4 ++-- 2 files changed, 30 insertions(+), 2 deletions(-) create mode 100755 scripts/draft-changelog.sh diff --git a/scripts/draft-changelog.sh b/scripts/draft-changelog.sh new file mode 100755 index 0000000000..6ff8546ce3 --- /dev/null +++ b/scripts/draft-changelog.sh @@ -0,0 +1,28 @@ +#!/bin/bash -e + +# Find the latest release tag that's in the current branch's history +latest_tag="" +for tag in $(git tag -l "v*.*.*" --sort=-version:refname); do + if git merge-base --is-ancestor "$tag" HEAD; then + latest_tag="$tag" + break + fi +done + +if [[ 
-z $latest_tag ]]; then + echo "No release tags found in current branch history" + exit 1 +fi + +echo "# Changes since $latest_tag:" +echo + +# Generate changelog entries from commits since the tag +git log --reverse \ + --perl-regexp \ + --author='^(?!dependabot\[bot\] )(?!github-actions\[bot\] )' \ + --pretty=format:"- %s" \ + "$latest_tag..HEAD" \ + | grep -E '\(#[0-9]+\)$' \ + | grep -v '^- Post release ' \ + | sed -E 's,\(#([0-9]+)\)$, ([#\1](https://github.com/aws-observability/aws-otel-java-instrumentation/pull/\1)),' diff --git a/scripts/find_breaking_changes.py b/scripts/find_breaking_changes.py index 59e0cf4541..b4be0396db 100644 --- a/scripts/find_breaking_changes.py +++ b/scripts/find_breaking_changes.py @@ -46,8 +46,8 @@ def get_releases_with_breaking_changes(repo, current_version, new_version): # Check if release notes have breaking changes as headers body = release.get("body", "") - breaking_header_pattern = r'^#+.*breaking changes' - if re.search(breaking_header_pattern, body, re.IGNORECASE | re.MULTILINE): + breaking_header_pattern = r'^\s*#+.*Breaking changes' + if re.search(breaking_header_pattern, body, re.MULTILINE): breaking_releases.append( { "version": release_version, From ad69ca1b549b555a220131f94f8f7b83447345ff Mon Sep 17 00:00:00 2001 From: GitHub Action Date: Mon, 29 Sep 2025 02:23:11 +0000 Subject: [PATCH 82/83] chore: update OpenTelemetry dependencies to latest versions --- dependencyManagement/build.gradle.kts | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/dependencyManagement/build.gradle.kts b/dependencyManagement/build.gradle.kts index cf5f3c2f26..07beb2b487 100644 --- a/dependencyManagement/build.gradle.kts +++ b/dependencyManagement/build.gradle.kts @@ -27,8 +27,8 @@ data class DependencySet(val group: String, val version: String, val modules: Li val testSnapshots = rootProject.findProperty("testUpstreamSnapshots") == "true" // This is the version of the upstream instrumentation BOM -val otelVersion 
= "2.18.1-adot1" -val otelSnapshotVersion = "2.19.0" +val otelVersion = "2.20.1" +val otelSnapshotVersion = "2.21.0" val otelAlphaVersion = if (!testSnapshots) "$otelVersion-alpha" else "$otelSnapshotVersion-alpha-SNAPSHOT" val otelJavaAgentVersion = if (!testSnapshots) otelVersion else "$otelSnapshotVersion-SNAPSHOT" // All versions below are only used in testing and do not affect the released artifact. @@ -77,9 +77,9 @@ val dependencyLists = listOf( "commons-logging:commons-logging:1.2", "com.sparkjava:spark-core:2.9.4", "com.squareup.okhttp3:okhttp:4.12.0", - "io.opentelemetry.contrib:opentelemetry-aws-xray:1.48.0-adot1", - "io.opentelemetry.contrib:opentelemetry-aws-resources:1.48.0-alpha", - "io.opentelemetry.proto:opentelemetry-proto:1.0.0-alpha", + "io.opentelemetry.contrib:opentelemetry-aws-xray:1.50.0", + "io.opentelemetry.contrib:opentelemetry-aws-resources:1.50.0", + "io.opentelemetry.proto:opentelemetry-proto:1.7.0-alpha", "io.opentelemetry.javaagent:opentelemetry-javaagent:$otelJavaAgentVersion", "io.opentelemetry:opentelemetry-extension-aws:1.20.1", "net.bytebuddy:byte-buddy:1.14.10", From 161f7b46f05b42c85df0875cfb842115ed2b0b1b Mon Sep 17 00:00:00 2001 From: GitHub Action Date: Wed, 5 Nov 2025 01:47:42 +0000 Subject: [PATCH 83/83] chore: update OpenTelemetry dependencies to latest versions --- dependencyManagement/build.gradle.kts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/dependencyManagement/build.gradle.kts b/dependencyManagement/build.gradle.kts index 07beb2b487..0b9d8e1797 100644 --- a/dependencyManagement/build.gradle.kts +++ b/dependencyManagement/build.gradle.kts @@ -27,8 +27,8 @@ data class DependencySet(val group: String, val version: String, val modules: Li val testSnapshots = rootProject.findProperty("testUpstreamSnapshots") == "true" // This is the version of the upstream instrumentation BOM -val otelVersion = "2.20.1" -val otelSnapshotVersion = "2.21.0" +val otelVersion = "2.21.0" +val otelSnapshotVersion 
= "2.22.0" val otelAlphaVersion = if (!testSnapshots) "$otelVersion-alpha" else "$otelSnapshotVersion-alpha-SNAPSHOT" val otelJavaAgentVersion = if (!testSnapshots) otelVersion else "$otelSnapshotVersion-SNAPSHOT" // All versions below are only used in testing and do not affect the released artifact. @@ -77,8 +77,8 @@ val dependencyLists = listOf( "commons-logging:commons-logging:1.2", "com.sparkjava:spark-core:2.9.4", "com.squareup.okhttp3:okhttp:4.12.0", - "io.opentelemetry.contrib:opentelemetry-aws-xray:1.50.0", - "io.opentelemetry.contrib:opentelemetry-aws-resources:1.50.0", + "io.opentelemetry.contrib:opentelemetry-aws-xray:1.51.0", + "io.opentelemetry.contrib:opentelemetry-aws-resources:1.51.0", "io.opentelemetry.proto:opentelemetry-proto:1.7.0-alpha", "io.opentelemetry.javaagent:opentelemetry-javaagent:$otelJavaAgentVersion", "io.opentelemetry:opentelemetry-extension-aws:1.20.1",