@@ -886,10 +886,6 @@ def test_set_output_data_with_input_json_delta(sentry_init):
 
 def test_anthropic_message_role_mapping(sentry_init, capture_events):
     """Test that Anthropic integration properly maps message roles like 'ai' to 'assistant'"""
-
-
-def test_anthropic_message_truncation(sentry_init, capture_events):
-    """Test that large messages are truncated properly in Anthropic integration."""
     sentry_init(
         integrations=[AnthropicIntegration(include_prompts=True)],
         traces_sample_rate=1.0,
@@ -898,9 +894,8 @@ def test_anthropic_message_truncation(sentry_init, capture_events):
     events = capture_events()
 
     client = Anthropic(api_key="z")
-
-    def mock_messages_create(*args, **kwargs):
-        return Message(
+    client.messages._post = mock.Mock(
+        return_value=Message(
             id="msg_1",
             content=[TextBlock(text="Hi there!", type="text")],
             model="claude-3-opus",
@@ -910,8 +905,7 @@ def mock_messages_create(*args, **kwargs):
             type="message",
             usage=Usage(input_tokens=10, output_tokens=5),
         )
-
-    client.messages._post = mock.Mock(return_value=mock_messages_create())
+    )
 
     test_messages = [
         {"role": "system", "content": "You are helpful."},
@@ -926,7 +920,8 @@ def mock_messages_create(*args, **kwargs):
         )
 
     (event,) = events
-    span = event["spans"][0]
+    (span,) = event["spans"]
+
     assert span["op"] == "gen_ai.chat"
     assert SPANDATA.GEN_AI_REQUEST_MESSAGES in span["data"]
 
@@ -942,39 +937,49 @@ def mock_messages_create(*args, **kwargs):
 
     roles = [msg["role"] for msg in stored_messages]
     assert "ai" not in roles
+
+
+def test_anthropic_message_truncation(sentry_init, capture_events):
+    """Test that large messages are truncated properly in Anthropic integration."""
+    sentry_init(
+        integrations=[AnthropicIntegration(include_prompts=True)],
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+    )
+    events = capture_events()
+
     client = Anthropic(api_key="test-api-key")
+    client.messages._post = mock.Mock(
+        return_value=Message(
+            id="test",
+            content=[TextBlock(text="Hello", type="text")],
+            model="claude-3",
+            role="assistant",
+            type="message",
+            usage=Usage(input_tokens=10, output_tokens=20),
+        )
+    )
 
     large_content = (
         "This is a very long message that will exceed our size limits. " * 1000
-    )  # ~64KB
+    )
     large_messages = [
-        {"role": "system", "content": "You are a helpful assistant."},
         {"role": "user", "content": large_content},
         {"role": "assistant", "content": large_content},
         {"role": "user", "content": large_content},
     ]
 
-    with mock.patch.object(client.messages, "create") as mock_create:
-        mock_create.return_value = Message(
-            id="test",
-            content=[TextBlock(text="Hello", type="text")],
-            model="claude-3",
-            role="assistant",
-            type="message",
-            usage=Usage(input_tokens=10, output_tokens=20),
+    with start_transaction(name="anthropic tx"):
+        client.messages.create(
+            model="claude-3-sonnet-20240229",
+            messages=large_messages,
+            max_tokens=100,
         )
 
-        with start_transaction(name="anthropic tx"):
-            client.messages.create(
-                model="claude-3-sonnet-20240229",
-                messages=large_messages,
-                max_tokens=100,
-            )
-
     (event,) = events
-    span = event["spans"][0]
-    assert SPANDATA.GEN_AI_REQUEST_MESSAGES in span["data"]
+    (span,) = event["spans"]
 
+    assert SPANDATA.GEN_AI_REQUEST_MESSAGES in span["data"]
     messages_data = span["data"][SPANDATA.GEN_AI_REQUEST_MESSAGES]
     assert isinstance(messages_data, str)
 
@@ -996,32 +1001,32 @@ def test_anthropic_single_large_message_preservation(sentry_init, capture_events
     events = capture_events()
 
     client = Anthropic(api_key="test-api-key")
-
-    huge_content = (
-        "This is an extremely long message that will definitely exceed size limits. "
-        * 2000
-    )
-    messages = [{"role": "user", "content": huge_content}]
-
-    with mock.patch.object(client.messages, "create") as mock_create:
-        mock_create.return_value = Message(
+    client.messages._post = mock.Mock(
+        return_value=Message(
             id="test",
             content=[TextBlock(text="Hello", type="text")],
             model="claude-3",
             role="assistant",
             type="message",
             usage=Usage(input_tokens=100, output_tokens=50),
         )
+    )
 
-        with start_transaction(name="anthropic tx"):
-            client.messages.create(
-                model="claude-3-sonnet-20240229",
-                messages=messages,
-                max_tokens=100,
-            )
+    huge_content = (
+        "This is an extremely long message that will definitely exceed size limits. "
+        * 2000
+    )
+    messages = [{"role": "user", "content": huge_content}]
+
+    with start_transaction(name="anthropic tx"):
+        client.messages.create(
+            model="claude-3-sonnet-20240229",
+            messages=messages,
+            max_tokens=100,
+        )
 
     (event,) = events
-    span = event["spans"][0]
+    (span,) = event["spans"]
 
     assert SPANDATA.GEN_AI_REQUEST_MESSAGES in span["data"]
     messages_data = span["data"][SPANDATA.GEN_AI_REQUEST_MESSAGES]
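
For readers unfamiliar with the mocking pattern used throughout this diff: the tests stub the client's low-level messages._post rather than patching messages.create, so the Sentry integration's wrapper around create still executes and records the gen_ai.chat span. Below is a minimal, self-contained sketch of that setup; it is not part of the diff, and the helper name make_mocked_client is illustrative only.

# Illustrative sketch, not part of the diff: stubbing the low-level ``_post``
# leaves the integration's instrumentation of ``messages.create`` intact.
from unittest import mock

from anthropic import Anthropic
from anthropic.types import Message, TextBlock, Usage


def make_mocked_client(api_key="test-api-key"):
    # Hypothetical helper returning a client whose API calls yield a canned Message,
    # mirroring the construction used in the tests above.
    client = Anthropic(api_key=api_key)
    client.messages._post = mock.Mock(
        return_value=Message(
            id="msg_1",
            content=[TextBlock(text="Hi there!", type="text")],
            model="claude-3-opus",
            role="assistant",
            stop_reason="end_turn",
            stop_sequence=None,
            type="message",
            usage=Usage(input_tokens=10, output_tokens=5),
        )
    )
    return client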