 from functools import wraps
+import json
 from typing import TYPE_CHECKING
 
 import sentry_sdk
@@ -112,18 +113,23 @@ def _set_input_data(span, kwargs, integration):
         and should_send_default_pii()
         and integration.include_prompts
     ):
-        set_data_normalized(span, SPANDATA.GEN_AI_REQUEST_MESSAGES, messages)
+        set_data_normalized(
+            span, SPANDATA.GEN_AI_REQUEST_MESSAGES, safe_serialize(messages)
+        )
+
+    set_data_normalized(
+        span, SPANDATA.GEN_AI_RESPONSE_STREAMING, kwargs.get("stream", False)
+    )
 
     kwargs_keys_to_attributes = {
         "max_tokens": SPANDATA.GEN_AI_REQUEST_MAX_TOKENS,
         "model": SPANDATA.GEN_AI_REQUEST_MODEL,
-        "stream": SPANDATA.GEN_AI_RESPONSE_STREAMING,
         "temperature": SPANDATA.GEN_AI_REQUEST_TEMPERATURE,
         "top_p": SPANDATA.GEN_AI_REQUEST_TOP_P,
     }
     for key, attribute in kwargs_keys_to_attributes.items():
         value = kwargs.get(key)
-        if value is not NOT_GIVEN or value is not None:
+        if value is not NOT_GIVEN and value is not None:
            set_data_normalized(span, attribute, value)
 
     # Input attributes: Tools
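Note on the condition fix in this hunk: `value is not NOT_GIVEN or value is not None` is always true, since no value can be both sentinels at once, so unset kwargs were still written to the span; requiring both checks with `and` skips them. Below is a minimal, self-contained sketch of that filtering, using a stand-in `NOT_GIVEN` sentinel and a hypothetical `filter_kwargs` helper rather than the real integration code.

# Stand-in sentinel; the real integration imports NOT_GIVEN from the anthropic SDK.
NOT_GIVEN = object()

def filter_kwargs(kwargs, keys):
    """Keep only kwargs that were explicitly set to a real value."""
    out = {}
    for key in keys:
        value = kwargs.get(key)
        # With `or` this test would always pass; `and` drops NOT_GIVEN and None.
        if value is not NOT_GIVEN and value is not None:
            out[key] = value
    return out

print(filter_kwargs(
    {"temperature": 0.2, "top_p": None, "stream": NOT_GIVEN},
    ["temperature", "top_p", "stream", "max_tokens"],
))
# {'temperature': 0.2}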
@@ -141,14 +147,19 @@ def _set_output_data(
     input_tokens,
     output_tokens,
     content_blocks,
-    finish_span=True,
+    finish_span=False,
 ):
     # type: (Span, AnthropicIntegration, str | None, int | None, int | None, list[Any], bool) -> None
     """
     Set output data for the span based on the AI response."""
     span.set_data(SPANDATA.GEN_AI_RESPONSE_MODEL, model)
     if should_send_default_pii() and integration.include_prompts:
-        set_data_normalized(span, SPANDATA.GEN_AI_RESPONSE_TEXT, content_blocks)
+        set_data_normalized(
+            span,
+            SPANDATA.GEN_AI_RESPONSE_TEXT,
+            json.dumps(content_blocks),
+            unpack=False,
+        )
 
     record_token_usage(
         span,
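For context on this hunk: the response blocks are now stored as one JSON string rather than a nested list, and `unpack=False` is passed presumably so `set_data_normalized` keeps that string as a single attribute instead of flattening it. A small sketch of the serialization step alone, with made-up block contents shaped like the plain dicts the callers below now pass in:

import json

# Made-up content blocks; real ones come from the Anthropic response.
content_blocks = [
    {"type": "text", "text": "Hello!"},
    {"type": "tool_use", "name": "get_weather", "input": {"city": "Paris"}},
]

# One JSON string for the whole response instead of a nested list of objects.
serialized = json.dumps(content_blocks)
print(serialized)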
@@ -196,7 +207,9 @@ def _sentry_patched_create_common(f, *args, **kwargs):
             getattr(result, "model", None),
             input_tokens,
             output_tokens,
-            content_blocks=result.content,
+            content_blocks=[
+                content_block.to_dict() for content_block in result.content
+            ],
             finish_span=True,
         )
 
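In the non-streaming path above, `result.content` is a list of Anthropic SDK model objects, and calling `to_dict()` on each turns them into plain dicts that serialize cleanly with `json.dumps` in `_set_output_data`. A minimal, defensive sketch of that conversion; `to_plain_dict` is a hypothetical helper, not part of the integration:

def to_plain_dict(block):
    """Best-effort conversion of a response content block into a JSON-friendly dict.

    Assumes the block either exposes to_dict() (as the SDK model objects used
    above do) or is already a plain dict; anything else is wrapped as text.
    """
    if hasattr(block, "to_dict"):
        return block.to_dict()
    if isinstance(block, dict):
        return block
    return {"type": "text", "text": str(block)}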
@@ -225,7 +238,7 @@ def new_iterator():
                 model=model,
                 input_tokens=input_tokens,
                 output_tokens=output_tokens,
-                content_blocks=content_blocks,
+                content_blocks=[{"text": "".join(content_blocks), "type": "text"}],
                 finish_span=True,
             )
 
@@ -250,7 +263,7 @@ async def new_iterator_async():
                 model=model,
                 input_tokens=input_tokens,
                 output_tokens=output_tokens,
-                content_blocks=content_blocks,
+                content_blocks=[{"text": "".join(content_blocks), "type": "text"}],
                 finish_span=True,
             )
 
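Both streaming wrappers (sync and async) now collapse the accumulated text chunks into a single text-block dict, so `_set_output_data` receives the same shape as the non-streaming path. A short illustration with made-up chunks:

# Made-up text chunks accumulated while iterating the stream.
content_blocks = ["The answer", " is", " 42."]

# Same shape the non-streaming path produces: a list with a single text block.
combined = [{"text": "".join(content_blocks), "type": "text"}]
print(combined)
# [{'text': 'The answer is 42.', 'type': 'text'}]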