We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 54ea620 commit 8ea8ae4Copy full SHA for 8ea8ae4
libs/partners/openai/tests/unit_tests/llms/test_base.py
@@ -108,3 +108,13 @@ def test_stream_response_to_generation_chunk() -> None:
108
assert chunk == GenerationChunk(
109
text="", generation_info={"finish_reason": None, "logprobs": None}
110
)
111
+
112
113
def test_generate_streaming_multiple_prompts_error() -> None:
    """Streaming mode must reject batched generation.

    `_generate` cannot interleave token streams for more than one prompt,
    so passing two prompts with ``streaming=True`` is expected to raise a
    ``ValueError`` with the documented message.
    """
    model = OpenAI(streaming=True)
    expected_message = "Cannot stream results with multiple prompts\\."
    with pytest.raises(ValueError, match=expected_message):
        model._generate(["foo", "bar"])
0 commit comments