  */
 import { Attributes, DiagLogger, Span, Tracer } from '@opentelemetry/api';
 import { RequestMetadata, ServiceExtension } from './ServiceExtension';
+import {
+  ATTR_GEN_AI_SYSTEM,
+  ATTR_GEN_AI_OPERATION_NAME,
+  ATTR_GEN_AI_REQUEST_MODEL,
+  ATTR_GEN_AI_REQUEST_MAX_TOKENS,
+  ATTR_GEN_AI_REQUEST_TEMPERATURE,
+  ATTR_GEN_AI_REQUEST_TOP_P,
+  ATTR_GEN_AI_REQUEST_STOP_SEQUENCES,
+  ATTR_GEN_AI_USAGE_INPUT_TOKENS,
+  ATTR_GEN_AI_USAGE_OUTPUT_TOKENS,
+  ATTR_GEN_AI_RESPONSE_FINISH_REASONS,
+  GEN_AI_OPERATION_NAME_VALUE_CHAT,
+  GEN_AI_SYSTEM_VALUE_AWS_BEDROCK,
+} from '../semconv';
 import {
   AwsSdkInstrumentationConfig,
   NormalizedRequest,
   NormalizedResponse,
 } from '../types';
 
-// Unstable attributes to define inline.
-const SEMATTRS_GEN_AI_SYSTEM = 'gen_ai.system';
-const SEMATTRS_GEN_AI_OPERATION_NAME = 'gen_ai.operation.name';
-const SEMATTRS_GEN_AI_REQUEST_MODEL = 'gen_ai.request.model';
-const SEMATTRS_GEN_AI_REQUEST_MAX_TOKENS = 'gen_ai.request.max_tokens';
-const SEMATTRS_GEN_AI_REQUEST_TEMPERATURE = 'gen_ai.request.temperature';
-const SEMATTRS_GEN_AI_REQUEST_TOP_P = 'gen_ai.request.top_p';
-const SEMATTRS_GEN_AI_REQUEST_STOP_SEQUENCES = 'gen_ai.request.stop_sequences';
-const SEMATTRS_GEN_AI_USAGE_INPUT_TOKENS = 'gen_ai.usage.input_tokens';
-const SEMATTRS_GEN_AI_USAGE_OUTPUT_TOKENS = 'gen_ai.usage.output_tokens';
-const SEMATTRS_GEN_AI_RESPONSE_FINISH_REASONS =
-  'gen_ai.response.finish_reasons';
-
-export class BedrockRuntimeExtension implements ServiceExtension {
+export class BedrockRuntimeServiceExtension implements ServiceExtension {
   requestPreSpanHook(
     request: NormalizedRequest,
     config: AwsSdkInstrumentationConfig,
     diag: DiagLogger
   ): RequestMetadata {
     let spanName: string | undefined;
     const spanAttributes: Attributes = {
-      [SEMATTRS_GEN_AI_SYSTEM]: 'bedrock',
+      [ATTR_GEN_AI_SYSTEM]: GEN_AI_SYSTEM_VALUE_AWS_BEDROCK,
     };
 
     switch (request.commandName) {
       case 'Converse':
-        spanAttributes[SEMATTRS_GEN_AI_OPERATION_NAME] = 'chat';
-        spanName = 'chat';
+        spanAttributes[ATTR_GEN_AI_OPERATION_NAME] =
+          GEN_AI_OPERATION_NAME_VALUE_CHAT;
+        spanName = GEN_AI_OPERATION_NAME_VALUE_CHAT;
         break;
     }
 
     const modelId = request.commandInput.modelId;
     if (modelId) {
-      spanAttributes[SEMATTRS_GEN_AI_REQUEST_MODEL] = modelId;
+      spanAttributes[ATTR_GEN_AI_REQUEST_MODEL] = modelId;
       if (spanName) {
         spanName += ` ${modelId}`;
       }
@@ -64,16 +66,16 @@ export class BedrockRuntimeExtension implements ServiceExtension {
     if (inferenceConfig) {
       const { maxTokens, temperature, topP, stopSequences } = inferenceConfig;
       if (maxTokens !== undefined) {
-        spanAttributes[SEMATTRS_GEN_AI_REQUEST_MAX_TOKENS] = maxTokens;
+        spanAttributes[ATTR_GEN_AI_REQUEST_MAX_TOKENS] = maxTokens;
       }
       if (temperature !== undefined) {
-        spanAttributes[SEMATTRS_GEN_AI_REQUEST_TEMPERATURE] = temperature;
+        spanAttributes[ATTR_GEN_AI_REQUEST_TEMPERATURE] = temperature;
       }
       if (topP !== undefined) {
-        spanAttributes[SEMATTRS_GEN_AI_REQUEST_TOP_P] = topP;
+        spanAttributes[ATTR_GEN_AI_REQUEST_TOP_P] = topP;
       }
       if (stopSequences !== undefined) {
-        spanAttributes[SEMATTRS_GEN_AI_REQUEST_STOP_SEQUENCES] = stopSequences;
+        spanAttributes[ATTR_GEN_AI_REQUEST_STOP_SEQUENCES] = stopSequences;
       }
     }
 
@@ -97,14 +99,14 @@ export class BedrockRuntimeExtension implements ServiceExtension {
     const { stopReason, usage } = response.data;
     const { inputTokens, outputTokens } = usage;
     if (inputTokens !== undefined) {
-      span.setAttribute(SEMATTRS_GEN_AI_USAGE_INPUT_TOKENS, inputTokens);
+      span.setAttribute(ATTR_GEN_AI_USAGE_INPUT_TOKENS, inputTokens);
     }
     if (outputTokens !== undefined) {
-      span.setAttribute(SEMATTRS_GEN_AI_USAGE_OUTPUT_TOKENS, outputTokens);
+      span.setAttribute(ATTR_GEN_AI_USAGE_OUTPUT_TOKENS, outputTokens);
     }
 
     if (stopReason !== undefined) {
-      span.setAttribute(SEMATTRS_GEN_AI_RESPONSE_FINISH_REASONS, [stopReason]);
+      span.setAttribute(ATTR_GEN_AI_RESPONSE_FINISH_REASONS, [stopReason]);
     }
   }
 }
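For context (not part of the diff): a minimal sketch of what the new '../semconv' module presumably exports, assuming the ATTR_* constants carry the same attribute names the removed inline SEMATTRS_* definitions used, and that the well-known values follow the OpenTelemetry GenAI semantic-convention registry ('chat', 'aws.bedrock'). The actual module contents are not shown in this commit.

// Hypothetical ../semconv contents, assumed for illustration only.
export const ATTR_GEN_AI_SYSTEM = 'gen_ai.system';
export const ATTR_GEN_AI_OPERATION_NAME = 'gen_ai.operation.name';
export const ATTR_GEN_AI_REQUEST_MODEL = 'gen_ai.request.model';
export const ATTR_GEN_AI_REQUEST_MAX_TOKENS = 'gen_ai.request.max_tokens';
export const ATTR_GEN_AI_REQUEST_TEMPERATURE = 'gen_ai.request.temperature';
export const ATTR_GEN_AI_REQUEST_TOP_P = 'gen_ai.request.top_p';
export const ATTR_GEN_AI_REQUEST_STOP_SEQUENCES = 'gen_ai.request.stop_sequences';
export const ATTR_GEN_AI_USAGE_INPUT_TOKENS = 'gen_ai.usage.input_tokens';
export const ATTR_GEN_AI_USAGE_OUTPUT_TOKENS = 'gen_ai.usage.output_tokens';
export const ATTR_GEN_AI_RESPONSE_FINISH_REASONS = 'gen_ai.response.finish_reasons';
// Well-known values per the GenAI registry (assumed here, not confirmed by the diff).
export const GEN_AI_OPERATION_NAME_VALUE_CHAT = 'chat';
export const GEN_AI_SYSTEM_VALUE_AWS_BEDROCK = 'aws.bedrock';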