def test_generate_streaming_multiple_prompts_error() -> None:
    """A streaming-enabled OpenAI LLM must refuse to generate for >1 prompt.

    Streaming yields a single token stream, so `_generate` is expected to
    raise ``ValueError`` when handed more than one prompt at once.
    """
    streaming_llm = OpenAI(streaming=True)
    # Escaped trailing dot: `match` is a regex, and we want a literal "."
    expected_message = "Cannot stream results with multiple prompts\\."

    with pytest.raises(ValueError, match=expected_message):
        streaming_llm._generate(["foo", "bar"])