Commit 27ca23d

Remove exclude_unset in streaming response (#3143)
1 parent 54d3544

File tree

1 file changed: +3 -3 lines changed

vllm/entrypoints/openai/serving_completion.py

Lines changed: 3 additions & 3 deletions
@@ -96,7 +96,7 @@ async def completion_stream_generator(
                             logprobs=logprobs,
                             finish_reason=finish_reason,
                         )
-                    ]).model_dump_json(exclude_unset=True)
+                    ]).model_dump_json()
                     yield f"data: {response_json}\n\n"

                     if output.finish_reason is not None:  # return final usage
@@ -121,7 +121,7 @@ async def completion_stream_generator(
                         )
                     ],
                     usage=final_usage,
-                ).model_dump_json(exclude_unset=True)
+                ).model_dump_json()
                 yield f"data: {response_json}\n\n"

        yield "data: [DONE]\n\n"
@@ -306,7 +306,7 @@ async def create_completion(self, request: CompletionRequest,
                     request, prompt=prompt)

                 generators.append(
-                    self.engine.generate(None,
+                    self.engine.generate(prompt,
                                          sampling_params,
                                          f"{request_id}-{i}",
                                          prompt_token_ids=input_ids,
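Why the first two hunks matter: with Pydantic v2, model_dump_json(exclude_unset=True) omits every field that was never explicitly assigned, so defaulted fields such as a null finish_reason vanish from the streamed chunks, while a plain model_dump_json() keeps them as explicit nulls. A minimal sketch, assuming Pydantic v2 (the Choice model below is a hypothetical stand-in, not vLLM's actual response class):

    from typing import Optional
    from pydantic import BaseModel

    class Choice(BaseModel):
        text: str
        finish_reason: Optional[str] = None  # defaulted, hence "unset" until assigned

    chunk = Choice(text="Hello")

    # exclude_unset silently drops the defaulted field:
    print(chunk.model_dump_json(exclude_unset=True))  # {"text":"Hello"}

    # a plain dump keeps it as an explicit null, which OpenAI-style clients expect:
    print(chunk.model_dump_json())                    # {"text":"Hello","finish_reason":null}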

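A hedged usage sketch of consuming the SSE stream this endpoint emits, assuming an iterable of already-decoded lines (the helper name and input shape are illustrative, not part of vLLM):

    import json

    def iter_chunks(lines):
        # Each event arrives as a "data: <json>" line; "[DONE]" terminates the stream.
        for line in lines:
            if not line.startswith("data: "):
                continue
            payload = line[len("data: "):]
            if payload == "[DONE]":
                break
            # After this commit, defaulted fields (e.g. finish_reason) are always
            # present in the payload, as null, instead of being omitted.
            yield json.loads(payload)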