-
Notifications
You must be signed in to change notification settings - Fork 786
Update the botocore instrumentation to emit events via the logs API instead of the deprecated events API
#3624
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from 5 commits
f491545
0008618
948053d
e406a18
47270d1
1b83355
2bc37c7
a3da8b2
7a06210
a64b308
afb7a7e
82a8b35
ead2af9
765eaa5
17f3073
a4d65f5
7f4f596
4310e63
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -457,9 +457,9 @@ def before_service_call( | |
|
||
messages = self._get_request_messages() | ||
for message in messages: | ||
event_logger = instrumentor_context.event_logger | ||
logger = instrumentor_context.logger | ||
for event in message_to_event(message, capture_content): | ||
event_logger.emit(event) | ||
logger.emit(event) | ||
|
||
if span.is_recording(): | ||
operation_name = span.attributes.get(GEN_AI_OPERATION_NAME, "") | ||
|
@@ -501,12 +501,12 @@ def _converse_on_success( | |
|
||
# In case of an early stream closure, the result may not contain outputs | ||
if self._stream_has_output_content(result): | ||
event_logger = instrumentor_context.event_logger | ||
logger = instrumentor_context.logger | ||
choice = _Choice.from_converse(result, capture_content) | ||
# this path is used by streaming apis, in that case we are already out of the span | ||
# context so need to add the span context manually | ||
span_ctx = span.get_span_context() | ||
event_logger.emit( | ||
logger.emit( | ||
choice.to_choice_event( | ||
|
||
trace_id=span_ctx.trace_id, | ||
span_id=span_ctx.span_id, | ||
|
@@ -729,11 +729,11 @@ def _handle_amazon_titan_response( | |
[result["completionReason"]], | ||
) | ||
|
||
event_logger = instrumentor_context.event_logger | ||
logger = instrumentor_context.logger | ||
choice = _Choice.from_invoke_amazon_titan( | ||
response_body, capture_content | ||
) | ||
event_logger.emit(choice.to_choice_event()) | ||
logger.emit(choice.to_choice_event()) | ||
|
||
metrics = instrumentor_context.metrics | ||
metrics_attributes = self._extract_metrics_attributes() | ||
|
@@ -791,9 +791,9 @@ def _handle_amazon_nova_response( | |
|
||
# In case of an early stream closure, the result may not contain outputs | ||
if self._stream_has_output_content(response_body): | ||
event_logger = instrumentor_context.event_logger | ||
logger = instrumentor_context.logger | ||
choice = _Choice.from_converse(response_body, capture_content) | ||
event_logger.emit(choice.to_choice_event()) | ||
logger.emit(choice.to_choice_event()) | ||
|
||
metrics = instrumentor_context.metrics | ||
metrics_attributes = self._extract_metrics_attributes() | ||
|
@@ -848,11 +848,11 @@ def _handle_anthropic_claude_response( | |
GEN_AI_RESPONSE_FINISH_REASONS, [response_body["stop_reason"]] | ||
) | ||
|
||
event_logger = instrumentor_context.event_logger | ||
logger = instrumentor_context.logger | ||
choice = _Choice.from_invoke_anthropic_claude( | ||
response_body, capture_content | ||
) | ||
event_logger.emit(choice.to_choice_event()) | ||
logger.emit(choice.to_choice_event()) | ||
|
||
metrics = instrumentor_context.metrics | ||
metrics_attributes = self._extract_metrics_attributes() | ||
|
@@ -903,11 +903,11 @@ def _handle_cohere_command_r_response( | |
[response_body["finish_reason"]], | ||
) | ||
|
||
event_logger = instrumentor_context.event_logger | ||
logger = instrumentor_context.logger | ||
choice = _Choice.from_invoke_cohere_command_r( | ||
response_body, capture_content | ||
) | ||
event_logger.emit(choice.to_choice_event()) | ||
logger.emit(choice.to_choice_event()) | ||
|
||
def _handle_cohere_command_response( | ||
self, | ||
|
@@ -929,11 +929,11 @@ def _handle_cohere_command_response( | |
[generations["finish_reason"]], | ||
) | ||
|
||
event_logger = instrumentor_context.event_logger | ||
logger = instrumentor_context.logger | ||
choice = _Choice.from_invoke_cohere_command( | ||
response_body, capture_content | ||
) | ||
event_logger.emit(choice.to_choice_event()) | ||
logger.emit(choice.to_choice_event()) | ||
|
||
def _handle_meta_llama_response( | ||
self, | ||
|
@@ -956,9 +956,9 @@ def _handle_meta_llama_response( | |
GEN_AI_RESPONSE_FINISH_REASONS, [response_body["stop_reason"]] | ||
) | ||
|
||
event_logger = instrumentor_context.event_logger | ||
logger = instrumentor_context.logger | ||
choice = _Choice.from_invoke_meta_llama(response_body, capture_content) | ||
event_logger.emit(choice.to_choice_event()) | ||
logger.emit(choice.to_choice_event()) | ||
|
||
def _handle_mistral_ai_response( | ||
self, | ||
|
@@ -979,11 +979,11 @@ def _handle_mistral_ai_response( | |
GEN_AI_RESPONSE_FINISH_REASONS, [outputs["stop_reason"]] | ||
) | ||
|
||
event_logger = instrumentor_context.event_logger | ||
logger = instrumentor_context.logger | ||
choice = _Choice.from_invoke_mistral_mistral( | ||
response_body, capture_content | ||
) | ||
event_logger.emit(choice.to_choice_event()) | ||
logger.emit(choice.to_choice_event()) | ||
|
||
def on_error( | ||
self, | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -22,7 +22,8 @@ | |
from botocore.eventstream import EventStream, EventStreamError | ||
from wrapt import ObjectProxy | ||
|
||
from opentelemetry._events import Event | ||
from opentelemetry._logs import LogRecord | ||
from opentelemetry.context import get_current | ||
from opentelemetry.instrumentation.botocore.environment_variables import ( | ||
OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT, | ||
) | ||
|
@@ -492,7 +493,7 @@ def extract_tool_results( | |
|
||
def message_to_event( | ||
message: dict[str, Any], capture_content: bool | ||
) -> Iterator[Event]: | ||
) -> Iterator[LogRecord]: | ||
attributes = {GEN_AI_SYSTEM: GenAiSystemValues.AWS_BEDROCK.value} | ||
role = message.get("role") | ||
content = message.get("content") | ||
|
@@ -507,16 +508,18 @@ def message_to_event( | |
elif role == "user": | ||
# in case of tool calls we send one tool event for tool call and one for the user event | ||
for tool_body in extract_tool_results(message, capture_content): | ||
yield Event( | ||
name="gen_ai.tool.message", | ||
yield LogRecord( | ||
event_name="gen_ai.tool.message", | ||
attributes=attributes, | ||
body=tool_body, | ||
context=get_current(), | ||
) | ||
|
||
yield Event( | ||
name=f"gen_ai.{role}.message", | ||
yield LogRecord( | ||
event_name=f"gen_ai.{role}.message", | ||
attributes=attributes, | ||
body=body if body else None, | ||
context=get_current(), | ||
) | ||
|
||
|
||
|
@@ -617,11 +620,12 @@ def _to_body_dict(self) -> dict[str, Any]: | |
"message": self.message, | ||
} | ||
|
||
def to_choice_event(self, **event_kwargs) -> Event: | ||
def to_choice_event(self, **event_kwargs) -> LogRecord: | ||
attributes = {GEN_AI_SYSTEM: GenAiSystemValues.AWS_BEDROCK.value} | ||
return Event( | ||
name="gen_ai.choice", | ||
return LogRecord( | ||
event_name="gen_ai.choice", | ||
attributes=attributes, | ||
body=self._to_body_dict(), | ||
**event_kwargs, | ||
context=get_current(), | ||
|
||
) |
Uh oh!
There was an error while loading. Please reload this page.