Skip to content

Commit 50598d1

Browse files

Committed: Merge branch 'potel-base' into potel-base-run-all-tests
2 parents 141159d + 9e64b1d — commit 50598d1

File tree

1 file changed: +9 additions, -6 deletions

tests/integrations/openai/test_openai.py

Lines changed: 9 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -83,8 +83,8 @@ def test_nonstreaming_chat_completion(
8383
assert span["op"] == "ai.chat_completions.create.openai"
8484

8585
if send_default_pii and include_prompts:
86-
assert "hello" in span["data"]["ai.input_messages"]["content"]
87-
assert "the model response" in span["data"]["ai.responses"]["content"]
86+
assert '"content": "hello"' in span["data"]["ai.input_messages"]
87+
assert '"content": "the model response"' in span["data"]["ai.responses"]
8888
else:
8989
assert "ai.input_messages" not in span["data"]
9090
assert "ai.responses" not in span["data"]
@@ -125,8 +125,8 @@ async def test_nonstreaming_chat_completion_async(
125125
assert span["op"] == "ai.chat_completions.create.openai"
126126

127127
if send_default_pii and include_prompts:
128-
assert "hello" in span["data"]["ai.input_messages"]["content"]
129-
assert "the model response" in span["data"]["ai.responses"]["content"]
128+
assert '"content": "hello"' in span["data"]["ai.input_messages"]
129+
assert '"content": "the model response"' in span["data"]["ai.responses"]
130130
else:
131131
assert "ai.input_messages" not in span["data"]
132132
assert "ai.responses" not in span["data"]
@@ -218,7 +218,7 @@ def test_streaming_chat_completion(
218218
assert span["op"] == "ai.chat_completions.create.openai"
219219

220220
if send_default_pii and include_prompts:
221-
assert "hello" in span["data"]["ai.input_messages"]["content"]
221+
assert '"content": "hello"' in span["data"]["ai.input_messages"]
222222
assert "hello world" in span["data"]["ai.responses"]
223223
else:
224224
assert "ai.input_messages" not in span["data"]
@@ -314,7 +314,7 @@ async def test_streaming_chat_completion_async(
314314
assert span["op"] == "ai.chat_completions.create.openai"
315315

316316
if send_default_pii and include_prompts:
317-
assert "hello" in span["data"]["ai.input_messages"]["content"]
317+
assert '"content": "hello"' in span["data"]["ai.input_messages"]
318318
assert "hello world" in span["data"]["ai.responses"]
319319
else:
320320
assert "ai.input_messages" not in span["data"]
@@ -330,6 +330,7 @@ async def test_streaming_chat_completion_async(
330330
pass # if tiktoken is not installed, we can't guarantee token usage will be calculated properly
331331

332332

333+
@pytest.mark.forked
333334
def test_bad_chat_completion(sentry_init, capture_events):
334335
sentry_init(integrations=[OpenAIIntegration()], traces_sample_rate=1.0)
335336
events = capture_events()
@@ -460,6 +461,7 @@ async def test_embeddings_create_async(
460461
assert span["measurements"]["ai_total_tokens_used"]["value"] == 30
461462

462463

464+
@pytest.mark.forked
463465
@pytest.mark.parametrize(
464466
"send_default_pii, include_prompts",
465467
[(True, True), (True, False), (False, True), (False, False)],
@@ -487,6 +489,7 @@ def test_embeddings_create_raises_error(
487489
assert event["level"] == "error"
488490

489491

492+
@pytest.mark.forked
490493
@pytest.mark.asyncio
491494
@pytest.mark.parametrize(
492495
"send_default_pii, include_prompts",

0 commit comments

Comments (0)