Commit 13e2ecb

fix: improve error handling for unexpected prompt types and update stream test assertions
1 parent 390c1a5 commit 13e2ecb

2 files changed (+6 lines, -2 lines)

outlines/models/llamacpp.py

Lines changed: 4 additions & 1 deletion
@@ -300,6 +300,8 @@ def generate(
                 **inference_kwargs,
             )
             result = completion["choices"][0]["message"]["content"]
+        else:  # Never reached  # pragma: no cover
+            raise ValueError("Unexpected prompt type.")
 
         self.model.reset()
 
@@ -359,7 +361,8 @@ def generate_stream(
             )
             for chunk in generator:
                 yield chunk["choices"][0]["delta"].get("content", "")
-
+        else:  # Never reached  # pragma: no cover
+            raise ValueError("Unexpected prompt type.")
 
 def from_llamacpp(model: "Llama", chat_mode: bool = True) -> LlamaCpp:
     """Create an Outlines `LlamaCpp` model instance from a

tests/models/test_llamacpp.py

Lines changed: 2 additions & 1 deletion
@@ -239,4 +239,5 @@ def test_llamacpp_no_chat(model_no_chat):
     assert isinstance(result, str)
 
     generator = model_no_chat.stream("Respond with one word. Not more.", None)
-    assert isinstance(next(generator), str)
+    for x in generator:
+        assert isinstance(x, str)
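The updated test iterates the whole stream, asserting that every chunk yielded by generate_stream is a string and that the generator runs to exhaustion without raising, whereas the previous assertion only inspected the first chunk via next(generator). A hypothetical consumer along the same lines (the model_no_chat fixture setup is not shown in this diff):

# Hypothetical usage of the streaming call exercised by the test; the second
# positional argument mirrors the call in the test above.
chunks = list(model_no_chat.stream("Respond with one word. Not more.", None))
assert all(isinstance(chunk, str) for chunk in chunks)
full_text = "".join(chunks)  # reassemble the streamed response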
