2 files changed: +6 −2 lines changed

@@ -300,6 +300,8 @@ def generate(
                 **inference_kwargs,
             )
             result = completion["choices"][0]["message"]["content"]
+        else:  # Never reached  # pragma: no cover
+            raise ValueError("Unexpected prompt type.")
 
         self.model.reset()
 
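For orientation, the branch structure these two hunks harden looks roughly like the sketch below. Only the `else` arm is taken verbatim from the diff; the `str`/`list` branches, the standalone `_dispatch_prompt` name, and the llama-cpp-python calls are assumptions reconstructed from the surrounding context, not the library's actual code.

from typing import Union
from llama_cpp import Llama

def _dispatch_prompt(model: Llama, prompt: Union[str, list]) -> str:
    # Hypothetical reconstruction of the dispatch around the new `else`
    # arm; only the final branch appears verbatim in the diff above.
    if isinstance(prompt, str):
        # Plain text-completion path (assumed, not shown in the hunk).
        completion = model(prompt)
        return completion["choices"][0]["text"]
    elif isinstance(prompt, list):
        # Chat-formatted path shown in the hunk above.
        completion = model.create_chat_completion(messages=prompt)
        return completion["choices"][0]["message"]["content"]
    else:  # Never reached  # pragma: no cover
        # Defensive guard: upstream validation should make this branch
        # unreachable, hence the explicit coverage exclusion.
        raise ValueError("Unexpected prompt type.")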
@@ -359,7 +361,8 @@ def generate_stream(
             )
             for chunk in generator:
                 yield chunk["choices"][0]["delta"].get("content", "")
-
+        else:  # Never reached  # pragma: no cover
+            raise ValueError("Unexpected prompt type.")
 
 def from_llamacpp(model: "Llama", chat_mode: bool = True) -> LlamaCpp:
     """Create an Outlines `LlamaCpp` model instance from a
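The trailing context lines show the public `from_llamacpp` factory. A minimal usage sketch, assuming llama-cpp-python is installed and a local GGUF model file exists; the path below is a placeholder, not from the diff:

import outlines
from llama_cpp import Llama

llm = Llama("path/to/model.gguf")  # placeholder path, not from the diff
model = outlines.from_llamacpp(llm, chat_mode=True)  # chat_mode as in the signature above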
@@ -239,4 +239,5 @@ def test_llamacpp_no_chat(model_no_chat):
     assert isinstance(result, str)
 
     generator = model_no_chat.stream("Respond with one word. Not more.", None)
-    assert isinstance(next(generator), str)
+    for x in generator:
+        assert isinstance(x, str)
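The updated assertion drains the whole stream rather than checking only the first chunk with `next()`, so a non-string chunk or an exception raised mid-stream (such as the new defensive `ValueError`) now fails the test. A sketch of the same consumption pattern; the `consume` helper is illustrative, not part of the test suite:

def consume(stream) -> str:
    # Drain a token stream, checking every chunk; inspecting only
    # next(stream) would miss failures in later chunks.
    chunks = []
    for chunk in stream:
        assert isinstance(chunk, str)
        chunks.append(chunk)
    return "".join(chunks)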