@@ -101,14 +101,9 @@ export class BedrockRuntimeServiceExtension implements ServiceExtension {
       case 'ConverseStream':
         return this.requestPreSpanHookConverse(request, config, diag, true);
       case 'InvokeModel':
-        return this.requestPreSpanHookInvokeModel(request, config, diag);
+        return this.requestPreSpanHookInvokeModel(request, config, diag, false);
       case 'InvokeModelWithResponseStream':
-        return this.requestPreSpanHookInvokeModelWithResponseStream(
-          request,
-          config,
-          diag,
-          true
-        );
+        return this.requestPreSpanHookInvokeModel(request, config, diag, true);
     }

     return {
@@ -164,7 +159,8 @@ export class BedrockRuntimeServiceExtension implements ServiceExtension {
   private requestPreSpanHookInvokeModel(
     request: NormalizedRequest,
     config: AwsSdkInstrumentationConfig,
-    diag: DiagLogger
+    diag: DiagLogger,
+    isStream: boolean
   ): RequestMetadata {
     let spanName: string | undefined;
     const spanAttributes: Attributes = {
@@ -319,87 +315,8 @@ export class BedrockRuntimeServiceExtension implements ServiceExtension {
     return {
       spanName,
       isIncoming: false,
-      spanAttributes,
-    };
-  }
-
-  private requestPreSpanHookInvokeModelWithResponseStream(
-    request: NormalizedRequest,
-    config: AwsSdkInstrumentationConfig,
-    diag: DiagLogger,
-    isStream: boolean
-  ): RequestMetadata {
-    let spanName: string | undefined;
-    const spanAttributes: Attributes = {
-      [ATTR_GEN_AI_SYSTEM]: GEN_AI_SYSTEM_VALUE_AWS_BEDROCK,
-      // add operation name for InvokeModel API
-    };
-
-    const modelId = request.commandInput?.modelId;
-    if (modelId) {
-      spanAttributes[ATTR_GEN_AI_REQUEST_MODEL] = modelId;
-    }
-
-    if (request.commandInput?.body) {
-      const requestBody = JSON.parse(request.commandInput.body);
-      if (modelId.includes('amazon.titan')) {
-        if (requestBody.textGenerationConfig?.temperature !== undefined) {
-          spanAttributes[ATTR_GEN_AI_REQUEST_TEMPERATURE] =
-            requestBody.textGenerationConfig.temperature;
-        }
-        if (requestBody.textGenerationConfig?.topP !== undefined) {
-          spanAttributes[ATTR_GEN_AI_REQUEST_TOP_P] =
-            requestBody.textGenerationConfig.topP;
-        }
-        if (requestBody.textGenerationConfig?.maxTokenCount !== undefined) {
-          spanAttributes[ATTR_GEN_AI_REQUEST_MAX_TOKENS] =
-            requestBody.textGenerationConfig.maxTokenCount;
-        }
-        if (requestBody.textGenerationConfig?.stopSequences !== undefined) {
-          spanAttributes[ATTR_GEN_AI_REQUEST_STOP_SEQUENCES] =
-            requestBody.textGenerationConfig.stopSequences;
-        }
-      } else if (modelId.includes('anthropic.claude')) {
-        if (requestBody.max_tokens !== undefined) {
-          spanAttributes[ATTR_GEN_AI_REQUEST_MAX_TOKENS] =
-            requestBody.max_tokens;
-        }
-        if (requestBody.temperature !== undefined) {
-          spanAttributes[ATTR_GEN_AI_REQUEST_TEMPERATURE] =
-            requestBody.temperature;
-        }
-        if (requestBody.top_p !== undefined) {
-          spanAttributes[ATTR_GEN_AI_REQUEST_TOP_P] = requestBody.top_p;
-        }
-        if (requestBody.stop_sequences !== undefined) {
-          spanAttributes[ATTR_GEN_AI_REQUEST_STOP_SEQUENCES] =
-            requestBody.stop_sequences;
-        }
-      } else if (modelId.includes('amazon.nova')) {
-        if (requestBody.inferenceConfig?.temperature !== undefined) {
-          spanAttributes[ATTR_GEN_AI_REQUEST_TEMPERATURE] =
-            requestBody.inferenceConfig.temperature;
-        }
-        if (requestBody.inferenceConfig?.top_p !== undefined) {
-          spanAttributes[ATTR_GEN_AI_REQUEST_TOP_P] =
-            requestBody.inferenceConfig.top_p;
-        }
-        if (requestBody.inferenceConfig?.max_new_tokens !== undefined) {
-          spanAttributes[ATTR_GEN_AI_REQUEST_MAX_TOKENS] =
-            requestBody.inferenceConfig.max_new_tokens;
-        }
-        if (requestBody.inferenceConfig?.stopSequences !== undefined) {
-          spanAttributes[ATTR_GEN_AI_REQUEST_STOP_SEQUENCES] =
-            requestBody.inferenceConfig.stopSequences;
-        }
-      }
-    }
-
-    return {
-      spanName,
-      isIncoming: false,
-      spanAttributes,
       isStream,
+      spanAttributes,
     };
   }

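For reference, below is a minimal standalone sketch (not part of the diff) of the consolidation being made here: InvokeModel and InvokeModelWithResponseStream now flow through a single hook whose only behavioral difference is the isStream flag reported back in the metadata. The interface, function name, attribute keys, and the Claude-only parsing branch are simplified stand-ins for illustration, not the instrumentation's actual imports.

// Simplified stand-ins for the instrumentation's RequestMetadata and semconv keys.
interface SketchRequestMetadata {
  spanName?: string;
  isIncoming: boolean;
  isStream: boolean;
  spanAttributes: Record<string, unknown>;
}

// One hook body serves both operations; isStream is the only difference.
function invokeModelPreSpanHookSketch(
  modelId: string,
  body: string,
  isStream: boolean
): SketchRequestMetadata {
  const spanAttributes: Record<string, unknown> = {
    'gen_ai.system': 'aws.bedrock',
    'gen_ai.request.model': modelId,
  };
  const requestBody = JSON.parse(body);
  if (modelId.includes('anthropic.claude')) {
    if (requestBody.max_tokens !== undefined) {
      spanAttributes['gen_ai.request.max_tokens'] = requestBody.max_tokens;
    }
    if (requestBody.temperature !== undefined) {
      spanAttributes['gen_ai.request.temperature'] = requestBody.temperature;
    }
  }
  return { isIncoming: false, isStream, spanAttributes };
}

// InvokeModel -> isStream = false; InvokeModelWithResponseStream -> isStream = true.
const unary = invokeModelPreSpanHookSketch(
  'anthropic.claude-3-sonnet',
  '{"max_tokens":512,"temperature":0.2}',
  false
);
const streaming = invokeModelPreSpanHookSketch(
  'anthropic.claude-3-sonnet',
  '{"max_tokens":512,"temperature":0.2}',
  true
);
console.log(unary.isStream, streaming.isStream); // false true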