@@ -268,6 +268,8 @@ def extract_attributes(self, attributes: _AttributeMapT):

         if "amazon.titan" in model_id:
             self._extract_titan_attributes(attributes, request_body)
+        if "amazon.nova" in model_id:
+            self._extract_nova_attributes(attributes, request_body)
         elif "anthropic.claude" in model_id:
             self._extract_claude_attributes(attributes, request_body)
         elif "meta.llama" in model_id:
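For context, this dispatch keys off substrings of the Bedrock `modelId`, so any Nova variant is routed to the new extractor. A minimal sketch of an instrumented call that would take the new branch, assuming the `bedrock-runtime` boto3 client, an illustrative model ID of `amazon.nova-lite-v1:0`, and a `messages`/`inferenceConfig` body whose keys match the ones read later in this diff (the exact ID and prompt are placeholders, not taken from this change):

```python
import json

import boto3

# Illustrative only: model ID and prompt are placeholders; the extractor above
# reads the "inferenceConfig" block out of the request body it inspects.
client = boto3.client("bedrock-runtime")

body = {
    "messages": [{"role": "user", "content": [{"text": "Hello"}]}],
    "inferenceConfig": {"temperature": 0.7, "top_p": 0.9, "max_new_tokens": 512},
}

response = client.invoke_model(
    modelId="amazon.nova-lite-v1:0",  # contains "amazon.nova", so the new branch fires
    body=json.dumps(body),
)
```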
@@ -288,6 +290,12 @@ def _extract_titan_attributes(self, attributes, request_body):
         self._set_if_not_none(attributes, GEN_AI_REQUEST_TOP_P, config.get("topP"))
         self._set_if_not_none(attributes, GEN_AI_REQUEST_MAX_TOKENS, config.get("maxTokenCount"))

+    def _extract_nova_attributes(self, attributes, request_body):
+        config = request_body.get("inferenceConfig", {})
+        self._set_if_not_none(attributes, GEN_AI_REQUEST_TEMPERATURE, config.get("temperature"))
+        self._set_if_not_none(attributes, GEN_AI_REQUEST_TOP_P, config.get("top_p"))
+        self._set_if_not_none(attributes, GEN_AI_REQUEST_MAX_TOKENS, config.get("max_new_tokens"))
+
     def _extract_claude_attributes(self, attributes, request_body):
         self._set_if_not_none(attributes, GEN_AI_REQUEST_MAX_TOKENS, request_body.get("max_tokens"))
         self._set_if_not_none(attributes, GEN_AI_REQUEST_TEMPERATURE, request_body.get("temperature"))
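The new `_extract_nova_attributes` above reads Nova's `inferenceConfig` block. A standalone sketch of that mapping, with a plain dict standing in for the span attributes and for `_set_if_not_none`; the attribute keys are assumed to follow the OpenTelemetry GenAI semantic-convention names behind the constants used in the diff:

```python
# Assumed semantic-convention values for the constants referenced in the diff.
GEN_AI_REQUEST_TEMPERATURE = "gen_ai.request.temperature"
GEN_AI_REQUEST_TOP_P = "gen_ai.request.top_p"
GEN_AI_REQUEST_MAX_TOKENS = "gen_ai.request.max_tokens"


def extract_nova_attributes(request_body: dict) -> dict:
    """Mirror of the new extractor: read Nova's inferenceConfig, skip missing keys."""
    attributes = {}
    config = request_body.get("inferenceConfig", {})
    for key, value in (
        (GEN_AI_REQUEST_TEMPERATURE, config.get("temperature")),
        (GEN_AI_REQUEST_TOP_P, config.get("top_p")),
        (GEN_AI_REQUEST_MAX_TOKENS, config.get("max_new_tokens")),
    ):
        if value is not None:  # same guard _set_if_not_none applies
            attributes[key] = value
    return attributes


print(extract_nova_attributes({"inferenceConfig": {"temperature": 0.7, "top_p": 0.9, "max_new_tokens": 512}}))
# {'gen_ai.request.temperature': 0.7, 'gen_ai.request.top_p': 0.9, 'gen_ai.request.max_tokens': 512}
```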
@@ -342,6 +350,8 @@ def on_success(self, span: Span, result: Dict[str, Any]):
                 response_body = json.loads(telemetry_content.decode("utf-8"))
                 if "amazon.titan" in model_id:
                     self._handle_amazon_titan_response(span, response_body)
+                if "amazon.nova" in model_id:
+                    self._handle_amazon_nova_response(span, response_body)
                 elif "anthropic.claude" in model_id:
                     self._handle_anthropic_claude_response(span, response_body)
                 elif "meta.llama" in model_id:
@@ -374,6 +384,14 @@ def _handle_amazon_titan_response(self, span: Span, response_body: Dict[str, Any
                     span.set_attribute(GEN_AI_USAGE_OUTPUT_TOKENS, result["tokenCount"])
                 if "completionReason" in result:
                     span.set_attribute(GEN_AI_RESPONSE_FINISH_REASONS, [result["completionReason"]])
+
+    def _handle_amazon_nova_response(self, span: Span, response_body: Dict[str, Any]):
+        if "inputTokenCount" in response_body:
+            span.set_attribute(GEN_AI_USAGE_INPUT_TOKENS, response_body["inputTokenCount"])
+        if "outputTokenCount" in response_body:
+            span.set_attribute(GEN_AI_USAGE_OUTPUT_TOKENS, response_body["outputTokenCount"])
+        if "stopReason" in response_body:
+            span.set_attribute(GEN_AI_RESPONSE_FINISH_REASONS, [response_body["stopReason"]])

     # pylint: disable=no-self-use
     def _handle_anthropic_claude_response(self, span: Span, response_body: Dict[str, Any]):
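The new `_handle_amazon_nova_response` above copies Nova's token counts and stop reason onto the span. A standalone sketch of that logic, with a plain dict in place of the OpenTelemetry `Span` and assumed GenAI semantic-convention values for the usage and finish-reason constants; the sample counts and stop reason are made-up values:

```python
# Assumed semantic-convention values for the constants referenced in the diff.
GEN_AI_USAGE_INPUT_TOKENS = "gen_ai.usage.input_tokens"
GEN_AI_USAGE_OUTPUT_TOKENS = "gen_ai.usage.output_tokens"
GEN_AI_RESPONSE_FINISH_REASONS = "gen_ai.response.finish_reasons"


def handle_amazon_nova_response(span_attributes: dict, response_body: dict) -> None:
    """Mirror of the new handler: copy token counts and wrap the stop reason in a list."""
    if "inputTokenCount" in response_body:
        span_attributes[GEN_AI_USAGE_INPUT_TOKENS] = response_body["inputTokenCount"]
    if "outputTokenCount" in response_body:
        span_attributes[GEN_AI_USAGE_OUTPUT_TOKENS] = response_body["outputTokenCount"]
    if "stopReason" in response_body:
        # the finish reason is recorded as a one-element list, matching the Titan handler
        span_attributes[GEN_AI_RESPONSE_FINISH_REASONS] = [response_body["stopReason"]]


attrs = {}
handle_amazon_nova_response(attrs, {"inputTokenCount": 110, "outputTokenCount": 42, "stopReason": "end_turn"})
print(attrs)
# {'gen_ai.usage.input_tokens': 110, 'gen_ai.usage.output_tokens': 42, 'gen_ai.response.finish_reasons': ['end_turn']}
```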