@@ -1,4 +1,3 @@
-import json
 import pytest
 from openai import AsyncOpenAI, OpenAI, AsyncStream, Stream, OpenAIError
 from openai.types import CompletionUsage, CreateEmbeddingResponse, Embedding
@@ -144,11 +143,8 @@ def test_nonstreaming_chat_completion(
     assert span["op"] == "gen_ai.chat"

     if send_default_pii and include_prompts:
-        assert "hello" in span["data"][SPANDATA.GEN_AI_REQUEST_MESSAGES]["content"]
-        assert (
-            "the model response"
-            in json.loads(span["data"][SPANDATA.GEN_AI_RESPONSE_TEXT])[0]["content"]
-        )
+        assert "hello" in span["data"][SPANDATA.GEN_AI_REQUEST_MESSAGES]
+        assert "the model response" in span["data"][SPANDATA.GEN_AI_RESPONSE_TEXT]
     else:
         assert SPANDATA.GEN_AI_REQUEST_MESSAGES not in span["data"]
         assert SPANDATA.GEN_AI_RESPONSE_TEXT not in span["data"]
@@ -189,11 +185,8 @@ async def test_nonstreaming_chat_completion_async(
     assert span["op"] == "gen_ai.chat"

     if send_default_pii and include_prompts:
-        assert "hello" in span["data"][SPANDATA.GEN_AI_REQUEST_MESSAGES]["content"]
-        assert (
-            "the model response"
-            in json.loads(span["data"][SPANDATA.GEN_AI_RESPONSE_TEXT])[0]["content"]
-        )
+        assert "hello" in span["data"][SPANDATA.GEN_AI_REQUEST_MESSAGES]
+        assert "the model response" in span["data"][SPANDATA.GEN_AI_RESPONSE_TEXT]
     else:
         assert SPANDATA.GEN_AI_REQUEST_MESSAGES not in span["data"]
         assert SPANDATA.GEN_AI_RESPONSE_TEXT not in span["data"]
@@ -285,7 +278,7 @@ def test_streaming_chat_completion(
     assert span["op"] == "gen_ai.chat"

     if send_default_pii and include_prompts:
-        assert "hello" in span["data"][SPANDATA.GEN_AI_REQUEST_MESSAGES]["content"]
+        assert "hello" in span["data"][SPANDATA.GEN_AI_REQUEST_MESSAGES]
         assert "hello world" in span["data"][SPANDATA.GEN_AI_RESPONSE_TEXT]
     else:
         assert SPANDATA.GEN_AI_REQUEST_MESSAGES not in span["data"]
@@ -381,7 +374,7 @@ async def test_streaming_chat_completion_async(
     assert span["op"] == "gen_ai.chat"

     if send_default_pii and include_prompts:
-        assert "hello" in span["data"][SPANDATA.GEN_AI_REQUEST_MESSAGES]["content"]
+        assert "hello" in span["data"][SPANDATA.GEN_AI_REQUEST_MESSAGES]
         assert "hello world" in span["data"][SPANDATA.GEN_AI_RESPONSE_TEXT]
     else:
         assert SPANDATA.GEN_AI_REQUEST_MESSAGES not in span["data"]
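
For context, a minimal sketch (not part of the diff) of the span-data shape the updated assertions assume: both GEN_AI_REQUEST_MESSAGES and GEN_AI_RESPONSE_TEXT hold plain strings (e.g. JSON-serialized messages), so simple substring checks replace the old json.loads/["content"] lookups. The fake_span dict and its values are hypothetical, and the SPANDATA import path is assumed from the test module's existing imports.

# Hypothetical span payload illustrating the shape the updated assertions
# expect: both gen_ai fields are plain strings, so substring checks work
# without json.loads or ["content"] indexing.
from sentry_sdk.consts import SPANDATA  # assumed import path, as used elsewhere in the tests

fake_span = {
    "op": "gen_ai.chat",
    "data": {
        SPANDATA.GEN_AI_REQUEST_MESSAGES: '[{"role": "user", "content": "hello"}]',
        SPANDATA.GEN_AI_RESPONSE_TEXT: "the model response",
    },
}

# The simplified assertions from the diff, applied to the illustrative payload.
assert "hello" in fake_span["data"][SPANDATA.GEN_AI_REQUEST_MESSAGES]
assert "the model response" in fake_span["data"][SPANDATA.GEN_AI_RESPONSE_TEXT]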