Skip to content

Commit 02cd79b

Browse files
authored
Fix incorrectly judging the request as a bad request (#4121)
* Incorrectly judging the request as a Bad Request * fix * final
1 parent 343eede commit 02cd79b

File tree

4 files changed

+14
-8
lines changed

4 files changed

+14
-8
lines changed

lmdeploy/pytorch/engine/engine.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -875,7 +875,7 @@ def _make_infer_outputs(
875875
# logprobs
876876
num_logprobs = msg.sampling_param.num_logprobs
877877
cur_logprobs = None
878-
if num_logprobs >= 0:
878+
if logprobs is not None:
879879
cur_logprobs = (logprobs.vals[idx][:num_logprobs + 1], logprobs.indices[idx][:num_logprobs + 1])
880880

881881
req_metrics = RequestMetrics(new_token_timestamp, msg.engine_events)

lmdeploy/serve/openai/serving_chat_completion.py

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -15,9 +15,13 @@ def check_request(request: ChatCompletionRequest, engine_config: 'TurbomindEngin
1515
try:
1616
logprobs_mode = engine_config.logprobs_mode
1717
logprobs = request.logprobs
18-
top_logprobs = request.top_logprobs
19-
if logprobs_mode is None and (logprobs is not None or top_logprobs is not None):
20-
return 'Logprobs or top_logprobs requested but not enabled logprobs_mode in engine configuration.'
18+
top_logprobs = request.top_logprobs or 0
19+
if logprobs_mode is None and (logprobs or top_logprobs > 0):
20+
return (f'Logprobs({logprobs})/top_logprobs({top_logprobs}) requested '
21+
'but not enabled logprobs_mode in engine configuration')
22+
if logprobs_mode is not None and (top_logprobs < 0 or (not logprobs and top_logprobs > 0)):
23+
return (f'Invalid logprobs({logprobs})/top_logprobs({top_logprobs}) requested '
24+
'when logprobs_mode is enabled in engine configuration.')
2125
except AttributeError:
2226
pass
2327

lmdeploy/serve/openai/serving_completion.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -14,9 +14,11 @@ def check_request(request: CompletionRequest, engine_config: 'TurbomindEngineCon
1414
# Check logprobs settings
1515
try:
1616
logprobs_mode = engine_config.logprobs_mode
17-
logprobs = request.logprobs
18-
if logprobs_mode is None and logprobs:
19-
return 'logprobs requested but not enabled logprobs_mode in engine configuration.'
17+
logprobs = request.logprobs or 0
18+
if logprobs > 0 and logprobs_mode is None:
19+
return f'logprobs({logprobs}) requested but not enabled logprobs_mode in engine configuration.'
20+
if logprobs_mode is not None and logprobs < 0:
21+
return 'logprobs must be non-negative when logprobs_mode is enabled in engine configuration.'
2022
except AttributeError:
2123
pass
2224

lmdeploy/serve/openai/serving_generate.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ def check_request(request: GenerateReqInput, engine_config: 'TurbomindEngineConf
1616
logprobs_mode = engine_config.logprobs_mode
1717
return_logprob = request.return_logprob
1818
if logprobs_mode is None and return_logprob:
19-
return 'return_logprob requested but not enabled logprobs_mode in engine configuration.'
19+
return f'return_logprob({return_logprob}) requested but not enabled logprobs_mode in engine configuration.'
2020
except AttributeError:
2121
pass
2222

0 commit comments

Comments
 (0)