1 parent 99d19ac commit 0bbeddc
fastchat/conversation.py
@@ -535,7 +535,7 @@ def get_conv_template(name: str) -> Conversation:
        roles=("### Instruction:", "### Response:"),
        sep="\n",
        stop_str="<|EOT|>",
-        sep_style=SeparatorStyle.ADD_NEW_LINE_SINGLE
+        sep_style=SeparatorStyle.ADD_NEW_LINE_SINGLE,
    )
fastchat/serve/inference.py
@@ -118,6 +118,7 @@ def generate_stream(
    token_logprobs = [None]  # The first token has no logprobs.
    sent_interrupt = False
    finish_reason = None
+    stopped = False
    for i in range(max_new_tokens):
        if i == 0:  # prefill
            if model.config.is_encoder_decoder:
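
The added stopped = False matters because stopped is otherwise assigned only inside the generation loop; if the loop body never runs (for example, max_new_tokens is 0), a later "if stopped:" check in generate_stream would hit an unbound local. Below is a minimal sketch of that failure mode, assuming a post-loop check on stopped similar to FastChat's; the helper name and the stop condition are illustrative, not the library's actual code.

def generate_stream_sketch(max_new_tokens: int) -> str:
    # Hypothetical reduction of generate_stream's control flow around `stopped`.
    finish_reason = None
    stopped = False  # the fix: defined before the loop so it always exists
    for i in range(max_new_tokens):
        stopped = i >= 3  # stand-in for "a stop token or stop string was hit"
        if stopped:
            break
    else:
        finish_reason = "length"
    # Without the pre-loop assignment, max_new_tokens == 0 would leave `stopped`
    # unbound here and this check would raise UnboundLocalError.
    if stopped:
        finish_reason = "stop"
    return finish_reason

print(generate_stream_sketch(0))   # length
print(generate_stream_sketch(10))  # stop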