File tree Expand file tree Collapse file tree 2 files changed +4
-5
lines changed Expand file tree Collapse file tree 2 files changed +4
-5
lines changed Original file line number Diff line number Diff line change @@ -109,7 +109,7 @@ class ChatCompletionRequest(OpenAIBaseModel):
109
109
frequency_penalty : Optional [float ] = 0.0
110
110
logit_bias : Optional [Dict [str , float ]] = None
111
111
logprobs : Optional [bool ] = False
112
- top_logprobs : Optional [int ] = None
112
+ top_logprobs : Optional [int ] = 0
113
113
max_tokens : Optional [int ] = None
114
114
n : Optional [int ] = 1
115
115
presence_penalty : Optional [float ] = 0.0
@@ -192,8 +192,7 @@ class ChatCompletionRequest(OpenAIBaseModel):
192
192
# doc: end-chat-completion-extra-params
193
193
194
194
def to_sampling_params (self ) -> SamplingParams :
195
- if self .logprobs and not self .top_logprobs :
196
- raise ValueError ("Top logprobs must be set when logprobs is." )
195
+ # We now allow logprobs to be true without top_logprobs.
197
196
198
197
logits_processors = None
199
198
if self .logit_bias :
Original file line number Diff line number Diff line change @@ -286,7 +286,7 @@ async def chat_completion_stream_generator(
286
286
logprobs = self ._create_logprobs (
287
287
token_ids = delta_token_ids ,
288
288
top_logprobs = top_logprobs ,
289
- num_output_top_logprobs = request .logprobs ,
289
+ num_output_top_logprobs = request .top_logprobs ,
290
290
initial_text_offset = len (previous_texts [i ]),
291
291
)
292
292
else :
@@ -373,7 +373,7 @@ async def chat_completion_full_generator(
373
373
logprobs = self ._create_logprobs (
374
374
token_ids = token_ids ,
375
375
top_logprobs = top_logprobs ,
376
- num_output_top_logprobs = request .logprobs ,
376
+ num_output_top_logprobs = request .top_logprobs ,
377
377
)
378
378
else :
379
379
logprobs = None
You can’t perform that action at this time.
0 commit comments