@@ -45,10 +45,7 @@ def setup_once():
         if version < (0, 16):
             raise DidNotEnable("anthropic 0.16 or newer required.")
 
-        if version >= (0, 27, 0):
-            Messages.create = _wrap_message_create(Messages.create)
-        else:
-            Messages.create = _wrap_message_create_old(Messages.create)
+        Messages.create = _wrap_message_create(Messages.create)
 
 
 def _capture_exception(exc):
@@ -182,106 +179,3 @@ def new_iterator():
         return result
 
     return _sentry_patched_create
-
-
-def _wrap_message_create_old(f):
-    # type: (Any) -> Any
-    @wraps(f)
-    def _sentry_patched_create(*args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        integration = sentry_sdk.get_client().get_integration(AnthropicIntegration)
-
-        if integration is None or "messages" not in kwargs:
-            return f(*args, **kwargs)
-
-        try:
-            iter(kwargs["messages"])
-        except TypeError:
-            return f(*args, **kwargs)
-
-        messages = list(kwargs["messages"])
-        model = kwargs.get("model")
-
-        span = sentry_sdk.start_span(
-            op=OP.ANTHROPIC_MESSAGES_CREATE,
-            name="Anthropic messages create",
-            origin=AnthropicIntegration.origin,
-        )
-        span.__enter__()
-
-        try:
-            result = f(*args, **kwargs)
-        except Exception as exc:
-            _capture_exception(exc)
-            span.__exit__(None, None, None)
-            raise exc from None
-
-        with capture_internal_exceptions():
-            span.set_data(SPANDATA.AI_MODEL_ID, model)
-            span.set_data(SPANDATA.AI_STREAMING, False)
-            if should_send_default_pii() and integration.include_prompts:
-                span.set_data(SPANDATA.AI_INPUT_MESSAGES, messages)
-            if hasattr(result, "content"):
-                if should_send_default_pii() and integration.include_prompts:
-                    span.set_data(
-                        SPANDATA.AI_RESPONSES,
-                        list(
-                            map(
-                                lambda message: {
-                                    "type": message.type,
-                                    "text": message.text,
-                                },
-                                result.content,
-                            )
-                        ),
-                    )
-                _calculate_token_usage(result, span)
-                span.__exit__(None, None, None)
-            elif hasattr(result, "_iterator"):
-                old_iterator = result._iterator
-
-                def new_iterator():
-                    # type: () -> Iterator[MessageStreamEvent]
-                    input_tokens = 0
-                    output_tokens = 0
-                    content_blocks = []
-                    with capture_internal_exceptions():
-                        for event in old_iterator:
-                            if hasattr(event, "type"):
-                                if event.type == "message_start":
-                                    usage = event.message.usage
-                                    input_tokens += usage.input_tokens
-                                    output_tokens += usage.output_tokens
-                                elif event.type == "content_block_start":
-                                    pass
-                                elif event.type == "content_block_delta":
-                                    content_blocks.append(event.delta.text)
-                                elif event.type == "content_block_stop":
-                                    pass
-                                elif event.type == "message_delta":
-                                    output_tokens += event.usage.output_tokens
-                                elif event.type == "message_stop":
-                                    continue
-                            yield event
-
-                        if should_send_default_pii() and integration.include_prompts:
-                            complete_message = "".join(content_blocks)
-                            span.set_data(
-                                SPANDATA.AI_RESPONSES,
-                                [{"type": "text", "text": complete_message}],
-                            )
-                        total_tokens = input_tokens + output_tokens
-                        record_token_usage(
-                            span, input_tokens, output_tokens, total_tokens
-                        )
-                        span.set_data(SPANDATA.AI_STREAMING, True)
-                        span.__exit__(None, None, None)
-
-                result._iterator = new_iterator()
-            else:
-                span.set_data("unknown_response", True)
-                span.__exit__(None, None, None)
-
-        return result
-
-    return _sentry_patched_create