Skip to content

Commit cdeaded

Browse files
committed
Bugfix: Ensure logs are printed when streaming
1 parent 3c96b43 commit cdeaded

File tree

1 file changed: +3 additions, −3 deletions

llama_cpp/llama.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -709,6 +709,9 @@ def _create_completion(
709709
print("Llama._create_completion: cache save", file=sys.stderr)
710710
self.cache[prompt_tokens + completion_tokens] = self.save_state()
711711

712+
if self.verbose:
713+
llama_cpp.llama_print_timings(self.ctx)
714+
712715
if stream:
713716
yield {
714717
"id": completion_id,
@@ -780,9 +783,6 @@ def _create_completion(
780783
"top_logprobs": top_logprobs,
781784
}
782785

783-
if self.verbose:
784-
llama_cpp.llama_print_timings(self.ctx)
785-
786786
yield {
787787
"id": completion_id,
788788
"object": "text_completion",

0 commit comments

Comments
 (0)