
Commit 7c3604f

[Bugfix] logprobs is not compatible with the OpenAI spec #4795 (#5031)
Parent: b1c2556
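Context for the commit title: in the OpenAI Chat Completions API, logprobs is a boolean flag and top_logprobs is an integer (0–20 in the spec) naming how many alternatives to return per token. Before this commit, vLLM rejected requests that set logprobs: true without also setting top_logprobs, which the spec allows. A minimal sketch of the now-accepted request shape, assuming the openai Python client (>= 1.0) pointed at a vLLM OpenAI-compatible server; the base URL, API key, and model name are placeholders:

from openai import OpenAI

# Hypothetical local endpoint; adjust base_url and model to your deployment.
client = OpenAI(base_url="http://localhost:8000/v1", api_key="EMPTY")

completion = client.chat.completions.create(
    model="my-model",
    messages=[{"role": "user", "content": "Hello"}],
    # Valid on its own per the OpenAI spec; top_logprobs may be omitted.
    logprobs=True,
)
print(completion.choices[0].logprobs)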

2 files changed, +4 −5 lines

vllm/entrypoints/openai/protocol.py

Lines changed: 2 additions & 3 deletions

@@ -109,7 +109,7 @@ class ChatCompletionRequest(OpenAIBaseModel):
     frequency_penalty: Optional[float] = 0.0
     logit_bias: Optional[Dict[str, float]] = None
     logprobs: Optional[bool] = False
-    top_logprobs: Optional[int] = None
+    top_logprobs: Optional[int] = 0
     max_tokens: Optional[int] = None
     n: Optional[int] = 1
     presence_penalty: Optional[float] = 0.0
@@ -192,8 +192,7 @@ class ChatCompletionRequest(OpenAIBaseModel):
     # doc: end-chat-completion-extra-params

     def to_sampling_params(self) -> SamplingParams:
-        if self.logprobs and not self.top_logprobs:
-            raise ValueError("Top logprobs must be set when logprobs is.")
+        # We now allow logprobs being true without top_logprobs.

         logits_processors = None
         if self.logit_bias:
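The protocol change aligns the defaults with the spec: top_logprobs now defaults to 0 instead of None, and the to_sampling_params validation that forced top_logprobs to accompany logprobs is dropped. A minimal, self-contained sketch (not the vLLM class itself) of the resulting field behavior, assuming plain Pydantic:

from typing import Optional
from pydantic import BaseModel

class ChatCompletionRequestSketch(BaseModel):
    # Mirrors the two fields touched in the diff above.
    logprobs: Optional[bool] = False
    top_logprobs: Optional[int] = 0  # was None; 0 means "no extra alternatives"

# Valid after this commit: logprobs may be true with top_logprobs left unset.
req = ChatCompletionRequestSketch(logprobs=True)
assert req.top_logprobs == 0  # downstream code receives an int, not None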

vllm/entrypoints/openai/serving_chat.py

Lines changed: 2 additions & 2 deletions

@@ -286,7 +286,7 @@ async def chat_completion_stream_generator(
                     logprobs = self._create_logprobs(
                         token_ids=delta_token_ids,
                         top_logprobs=top_logprobs,
-                        num_output_top_logprobs=request.logprobs,
+                        num_output_top_logprobs=request.top_logprobs,
                         initial_text_offset=len(previous_texts[i]),
                     )
                 else:
@@ -373,7 +373,7 @@ async def chat_completion_full_generator(
             logprobs = self._create_logprobs(
                 token_ids=token_ids,
                 top_logprobs=top_logprobs,
-                num_output_top_logprobs=request.logprobs,
+                num_output_top_logprobs=request.top_logprobs,
             )
         else:
             logprobs = None
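The serving-side change fixes which request field is forwarded: request.logprobs is a boolean, so passing it as num_output_top_logprobs silently coerced True/False to 1/0, while request.top_logprobs carries the actual requested count. A toy illustration of that coercion, using a hypothetical stand-in for vLLM's _create_logprobs:

def create_logprobs_stub(num_output_top_logprobs: int) -> list:
    # Stand-in: return one placeholder entry per requested top logprob.
    return ["<top-logprob>"] * num_output_top_logprobs

print(create_logprobs_stub(True))  # bool coerces to int 1: always one entry
print(create_logprobs_stub(5))     # the intended count from request.top_logprobs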
