@@ -218,25 +218,29 @@ def test_basic_integration(mock_client):
     client = Anthropic(posthog_client=mock_client)
     client.messages.create(
         model="claude-3-opus-20240229",
-        messages=[{"role": "user", "content": "You must always answer with 'Bar'."}],
+        messages=[{"role": "user", "content": "Foo"}],
         max_tokens=1,
         temperature=0,
         posthog_distinct_id="test-id",
         posthog_properties={"foo": "bar"},
+        system="You must always answer with 'Bar'.",
     )

     assert mock_client.capture.call_count == 1

     call_args = mock_client.capture.call_args[1]
     props = call_args["properties"]
-
     assert call_args["distinct_id"] == "test-id"
     assert call_args["event"] == "$ai_generation"
     assert props["$ai_provider"] == "anthropic"
     assert props["$ai_model"] == "claude-3-opus-20240229"
-    assert props["$ai_input"] == [{"role": "user", "content": "You must always answer with 'Bar'."}]
+    assert props["$ai_input"] == [
+        {"role": "system", "content": "You must always answer with 'Bar'."},
+        {"role": "user", "content": "Foo"},
+    ]
     assert props["$ai_output_choices"][0]["role"] == "assistant"
-    assert props["$ai_input_tokens"] == 16
+    assert props["$ai_output_choices"][0]["content"] == "Bar"
+    assert props["$ai_input_tokens"] == 18
     assert props["$ai_output_tokens"] == 1
     assert props["$ai_http_status"] == 200
     assert props["foo"] == "bar"
@@ -273,6 +277,54 @@ async def test_basic_async_integration(mock_client):
     assert isinstance(props["$ai_latency"], float)


+def test_streaming_system_prompt(mock_client, mock_anthropic_stream):
+    with patch("anthropic.resources.Messages.create", return_value=mock_anthropic_stream):
+        client = Anthropic(api_key="test-key", posthog_client=mock_client)
+        response = client.messages.create(
+            model="claude-3-opus-20240229",
+            system="Foo",
+            messages=[{"role": "user", "content": "Bar"}],
+            stream=True,
+        )
+
+        # Consume the stream
+        list(response)
+
+        # Wait a bit to ensure the capture is called
+        time.sleep(0.1)
+        assert mock_client.capture.call_count == 1
+
+        call_args = mock_client.capture.call_args[1]
+        props = call_args["properties"]
+        assert props["$ai_input"] == [{"role": "system", "content": "Foo"}, {"role": "user", "content": "Bar"}]
+
+
+@pytest.mark.skipif(not ANTHROPIC_API_KEY, reason="ANTHROPIC_API_KEY is not set")
+async def test_async_streaming_system_prompt(mock_client, mock_anthropic_stream):
+    client = AsyncAnthropic(posthog_client=mock_client)
+    response = await client.messages.create(
+        model="claude-3-opus-20240229",
+        system="You must always answer with 'Bar'.",
+        messages=[{"role": "user", "content": "Foo"}],
+        stream=True,
+        max_tokens=1,
+    )
+
+    # Consume the stream
+    [c async for c in response]
+
+    # Wait a bit to ensure the capture is called
+    time.sleep(0.1)
+    assert mock_client.capture.call_count == 1
+
+    call_args = mock_client.capture.call_args[1]
+    props = call_args["properties"]
+
+    assert props["$ai_input"] == [
+        {"role": "system", "content": "You must always answer with 'Bar'."},
+        {"role": "user", "content": "Foo"},
+    ]
+
 def test_core_model_params(mock_client, mock_anthropic_response):
     with patch("anthropic.resources.Messages.create", return_value=mock_anthropic_response):
         client = Anthropic(api_key="test-key", posthog_client=mock_client)
@@ -287,12 +339,9 @@ def test_core_model_params(mock_client, mock_anthropic_response):
         )

         assert response == mock_anthropic_response
-        assert mock_client.capture.call_count == 1
-
-        call_args = mock_client.capture.call_args[1]
-        props = call_args["properties"]
+        props = mock_client.capture.call_args[1]["properties"]
         assert props["$ai_model_parameters"] == {"temperature": 0.5, "max_tokens": 100, "stream": False}
         assert props["$ai_temperature"] == 0.5
         assert props["$ai_max_tokens"] == 100
         assert props["$ai_stream"] == False
-        assert props["foo"] == "bar"
+        assert props["foo"] == "bar"