@@ -120,7 +120,7 @@ def test_basic_chat_chain(mock_client, stream):
120120 {"role" : "system" , "content" : "You are a helpful assistant." },
121121 {"role" : "user" , "content" : "Who won the world series in 2020?" },
122122 ]
123- assert props ["$ai_output" ] == {
123+ assert props ["$ai_output_choices" ] == {
124124 "choices" : [{"role" : "assistant" , "content" : "The Los Angeles Dodgers won the World Series in 2020." }]
125125 }
126126 assert props ["$ai_input_tokens" ] == 10
@@ -165,7 +165,7 @@ async def test_async_basic_chat_chain(mock_client, stream):
165165 {"role" : "system" , "content" : "You are a helpful assistant." },
166166 {"role" : "user" , "content" : "Who won the world series in 2020?" },
167167 ]
168- assert props ["$ai_output" ] == {
168+ assert props ["$ai_output_choices" ] == {
169169 "choices" : [{"role" : "assistant" , "content" : "The Los Angeles Dodgers won the World Series in 2020." }]
170170 }
171171 assert props ["$ai_input_tokens" ] == 10
@@ -200,7 +200,7 @@ def test_basic_llm_chain(mock_client, Model, stream):
200200 assert "$ai_model" in props
201201 assert "$ai_provider" in props
202202 assert props ["$ai_input" ] == ["Who won the world series in 2020?" ]
203- assert props ["$ai_output" ] == {"choices" : ["The Los Angeles Dodgers won the World Series in 2020." ]}
203+ assert props ["$ai_output_choices" ] == {"choices" : ["The Los Angeles Dodgers won the World Series in 2020." ]}
204204 assert props ["$ai_http_status" ] == 200
205205 assert props ["$ai_trace_id" ] is not None
206206 assert isinstance (props ["$ai_latency" ], float )
@@ -231,7 +231,7 @@ async def test_async_basic_llm_chain(mock_client, Model, stream):
231231 assert "$ai_model" in props
232232 assert "$ai_provider" in props
233233 assert props ["$ai_input" ] == ["Who won the world series in 2020?" ]
234- assert props ["$ai_output" ] == {"choices" : ["The Los Angeles Dodgers won the World Series in 2020." ]}
234+ assert props ["$ai_output_choices" ] == {"choices" : ["The Los Angeles Dodgers won the World Series in 2020." ]}
235235 assert props ["$ai_http_status" ] == 200
236236 assert props ["$ai_trace_id" ] is not None
237237 assert isinstance (props ["$ai_latency" ], float )
@@ -258,7 +258,7 @@ def test_trace_id_for_multiple_chains(mock_client):
258258 assert "$ai_model" in first_call_props
259259 assert "$ai_provider" in first_call_props
260260 assert first_call_props ["$ai_input" ] == [{"role" : "user" , "content" : "Foo" }]
261- assert first_call_props ["$ai_output" ] == {"choices" : [{"role" : "assistant" , "content" : "Bar" }]}
261+ assert first_call_props ["$ai_output_choices" ] == {"choices" : [{"role" : "assistant" , "content" : "Bar" }]}
262262 assert first_call_props ["$ai_http_status" ] == 200
263263 assert first_call_props ["$ai_trace_id" ] is not None
264264 assert isinstance (first_call_props ["$ai_latency" ], float )
@@ -270,7 +270,7 @@ def test_trace_id_for_multiple_chains(mock_client):
270270 assert "$ai_model" in second_call_props
271271 assert "$ai_provider" in second_call_props
272272 assert second_call_props ["$ai_input" ] == [{"role" : "assistant" , "content" : "Bar" }]
273- assert second_call_props ["$ai_output" ] == {"choices" : [{"role" : "assistant" , "content" : "Bar" }]}
273+ assert second_call_props ["$ai_output_choices" ] == {"choices" : [{"role" : "assistant" , "content" : "Bar" }]}
274274 assert second_call_props ["$ai_http_status" ] == 200
275275 assert second_call_props ["$ai_trace_id" ] is not None
276276 assert isinstance (second_call_props ["$ai_latency" ], float )
@@ -338,7 +338,7 @@ def test_metadata(mock_client):
338338 assert first_call_props ["$ai_trace_id" ] == "test-trace-id"
339339 assert first_call_props ["foo" ] == "bar"
340340 assert first_call_props ["$ai_input" ] == [{"role" : "user" , "content" : "Foo" }]
341- assert first_call_props ["$ai_output" ] == {"choices" : [{"role" : "assistant" , "content" : "Bar" }]}
341+ assert first_call_props ["$ai_output_choices" ] == {"choices" : [{"role" : "assistant" , "content" : "Bar" }]}
342342 assert first_call_props ["$ai_http_status" ] == 200
343343 assert isinstance (first_call_props ["$ai_latency" ], float )
344344
@@ -392,7 +392,7 @@ def test_openai_error(mock_client):
392392 props = args ["properties" ]
393393 assert props ["$ai_http_status" ] == 401
394394 assert props ["$ai_input" ] == [{"role" : "user" , "content" : "Foo" }]
395- assert "$ai_output" not in props
395+ assert "$ai_output_choices" not in props
396396
397397
398398@pytest .mark .skipif (not OPENAI_API_KEY , reason = "OpenAI API key not set" )
@@ -443,7 +443,7 @@ def test_openai_chain(mock_client):
443443 {"role" : "system" , "content" : 'You must always answer with "Bar".' },
444444 {"role" : "user" , "content" : "Foo" },
445445 ]
446- assert first_call_props ["$ai_output" ] == {
446+ assert first_call_props ["$ai_output_choices" ] == {
447447 "choices" : [
448448 {
449449 "role" : "assistant" ,
@@ -486,7 +486,7 @@ def test_openai_captures_multiple_generations(mock_client):
486486 {"role" : "system" , "content" : 'You must always answer with "Bar".' },
487487 {"role" : "user" , "content" : "Foo" },
488488 ]
489- assert first_call_props ["$ai_output" ] == {
489+ assert first_call_props ["$ai_output_choices" ] == {
490490 "choices" : [
491491 {
492492 "role" : "assistant" ,
@@ -544,7 +544,7 @@ def test_openai_streaming(mock_client):
544544 {"role" : "system" , "content" : 'You must always answer with "Bar".' },
545545 {"role" : "user" , "content" : "Foo" },
546546 ]
547- assert first_call_props ["$ai_output" ] == {"choices" : [{"role" : "assistant" , "content" : "Bar" }]}
547+ assert first_call_props ["$ai_output_choices" ] == {"choices" : [{"role" : "assistant" , "content" : "Bar" }]}
548548 assert first_call_props ["$ai_http_status" ] == 200
549549 assert first_call_props ["$ai_input_tokens" ] == 20
550550 assert first_call_props ["$ai_output_tokens" ] == 1
@@ -576,7 +576,7 @@ async def test_async_openai_streaming(mock_client):
576576 {"role" : "system" , "content" : 'You must always answer with "Bar".' },
577577 {"role" : "user" , "content" : "Foo" },
578578 ]
579- assert first_call_props ["$ai_output" ] == {"choices" : [{"role" : "assistant" , "content" : "Bar" }]}
579+ assert first_call_props ["$ai_output_choices" ] == {"choices" : [{"role" : "assistant" , "content" : "Bar" }]}
580580 assert first_call_props ["$ai_http_status" ] == 200
581581 assert first_call_props ["$ai_input_tokens" ] == 20
582582 assert first_call_props ["$ai_output_tokens" ] == 1
@@ -630,7 +630,7 @@ def test_privacy_mode_local(mock_client):
630630 assert mock_client .capture .call_count == 1
631631 call = mock_client .capture .call_args [1 ]
632632 assert call ["properties" ]["$ai_input" ] is None
633- assert call ["properties" ]["$ai_output" ] is None
633+ assert call ["properties" ]["$ai_output_choices" ] is None
634634
635635
636636def test_privacy_mode_global (mock_client ):
@@ -649,4 +649,4 @@ def test_privacy_mode_global(mock_client):
649649 assert mock_client .capture .call_count == 1
650650 call = mock_client .capture .call_args [1 ]
651651 assert call ["properties" ]["$ai_input" ] is None
652- assert call ["properties" ]["$ai_output" ] is None
652+ assert call ["properties" ]["$ai_output_choices" ] is None