Skip to content

Commit 77e37d9

Browse files
authored
[https://nvbugs/5753250][infra] Further waive all tests in _test_openai_responses.py (#10176)
Signed-off-by: Bo Li <22713281+bobboli@users.noreply.github.com>
1 parent 2ce785f commit 77e37d9

File tree

1 file changed

+5
-0
lines changed

1 file changed

+5
-0
lines changed

tests/unittest/llmapi/apps/_test_openai_responses.py

Lines changed: 5 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -94,6 +94,7 @@ async def test_reasoning(client: openai.AsyncOpenAI, model: str):
9494
check_reponse(response, "test_reasoning: ")
9595

9696

97+
@pytest.mark.skip(reason="https://nvbugs/5753250")
9798
@pytest.mark.asyncio(loop_scope="module")
9899
async def test_reasoning_effort(client: openai.AsyncOpenAI, model: str):
99100
for effort in ["low", "medium", "high"]:
@@ -106,6 +107,7 @@ async def test_reasoning_effort(client: openai.AsyncOpenAI, model: str):
106107
check_reponse(response, f"test_reasoning_effort_{effort}: ")
107108

108109

110+
@pytest.mark.skip(reason="https://nvbugs/5753250")
109111
@pytest.mark.asyncio(loop_scope="module")
110112
async def test_chat(client: openai.AsyncOpenAI, model: str):
111113
response = await client.responses.create(model=model,
@@ -150,6 +152,7 @@ def get_current_weather(location: str, format: str = "celsius") -> dict:
150152
return {"sunny": True, "temperature": 20 if format == "celsius" else 68}
151153

152154

155+
@pytest.mark.skip(reason="https://nvbugs/5753250")
153156
@pytest.mark.asyncio(loop_scope="module")
154157
async def test_tool_calls(client: openai.AsyncOpenAI, model: str):
155158
if model.startswith("DeepSeek-R1"):
@@ -201,6 +204,7 @@ async def test_tool_calls(client: openai.AsyncOpenAI, model: str):
201204
check_tool_calling(response, False, "test_tool_calls: ")
202205

203206

207+
@pytest.mark.skip(reason="https://nvbugs/5753250")
204208
@pytest.mark.asyncio(loop_scope="module")
205209
async def test_streaming(client: openai.AsyncOpenAI, model: str):
206210
stream = await client.responses.create(
@@ -222,6 +226,7 @@ async def test_streaming(client: openai.AsyncOpenAI, model: str):
222226
assert full_reasoning_response
223227

224228

229+
@pytest.mark.skip(reason="https://nvbugs/5753250")
225230
@pytest.mark.asyncio(loop_scope="module")
226231
async def test_streaming_tool_call(client: openai.AsyncOpenAI, model: str):
227232
if model.startswith("DeepSeek-R1"):

0 commit comments

Comments (0)