Commit 170c96a

🥅 Keep current request validation handling for other endpoints
Signed-off-by: Evaline Ju <69598118+evaline-ju@users.noreply.github.com>
1 parent ae4c4f3 commit 170c96a

File tree: 1 file changed (+22 −3 lines)

vllm_detector_adapter/api_server.py

Lines changed: 22 additions & 3 deletions
@@ -1,5 +1,6 @@
 # Standard
 from argparse import Namespace
+from http import HTTPStatus
 import inspect
 import signal
 
@@ -14,7 +15,7 @@
 from vllm.entrypoints.logger import RequestLogger
 from vllm.entrypoints.openai import api_server
 from vllm.entrypoints.openai.cli_args import make_arg_parser, validate_parsed_serve_args
-from vllm.entrypoints.openai.protocol import ErrorResponse
+from vllm.entrypoints.openai.protocol import ErrorInfo, ErrorResponse
 from vllm.entrypoints.openai.serving_models import BaseModelPath, OpenAIServingModels
 from vllm.entrypoints.openai.tool_parsers import ToolParserManager
 from vllm.utils import FlexibleArgumentParser, is_valid_ipv6_address, set_ulimit
@@ -173,8 +174,26 @@ async def validation_exception_handler(
             return JSONResponse(
                 status_code=exc.error.code, content=exc.error.model_dump()
             )
-        # For other routes, let FastAPI handle normally
-        raise exc
+        else:
+            # vLLM general request validation error handling
+            exc_str = str(exc)
+            errors_str = str(exc.errors())
+
+            if exc.errors() and errors_str and errors_str != exc_str:
+                message = f"{exc_str} {errors_str}"
+            else:
+                message = exc_str
+
+            err = ErrorResponse(
+                error=ErrorInfo(
+                    message=message,
+                    type=HTTPStatus.BAD_REQUEST.phrase,
+                    code=HTTPStatus.BAD_REQUEST,
+                )
+            )
+            return JSONResponse(
+                err.model_dump(), status_code=HTTPStatus.BAD_REQUEST
+            )
 
     # api_server.init_app_state takes vllm_config
     # ref. https://github.com/vllm-project/vllm/pull/16572
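
For reference, the fallback branch added above mirrors vLLM's general RequestValidationError handling, as the added comment notes. Below is a minimal, self-contained sketch of that branch on a bare FastAPI app: the /echo route and EchoRequest model are invented for illustration, the detection-endpoint branch that appears as context in the diff (the early return using exc.error) is omitted, and it assumes a vLLM version whose ErrorResponse wraps an ErrorInfo, as in this diff. It is not the adapter's full handler.

# Standalone sketch of the fallback validation handling added in this commit.
from http import HTTPStatus

from fastapi import FastAPI, Request
from fastapi.exceptions import RequestValidationError
from fastapi.responses import JSONResponse
from pydantic import BaseModel

from vllm.entrypoints.openai.protocol import ErrorInfo, ErrorResponse

app = FastAPI()


class EchoRequest(BaseModel):
    # Hypothetical request model used only to trigger validation errors
    text: str


@app.post("/echo")
async def echo(req: EchoRequest):
    # Hypothetical route, not part of the adapter
    return {"text": req.text}


@app.exception_handler(RequestValidationError)
async def validation_exception_handler(request: Request, exc: RequestValidationError):
    # Same message assembly as the `else` branch added in this commit:
    # combine str(exc) with exc.errors() when they differ and are non-empty
    exc_str = str(exc)
    errors_str = str(exc.errors())

    if exc.errors() and errors_str and errors_str != exc_str:
        message = f"{exc_str} {errors_str}"
    else:
        message = exc_str

    # Wrap the message in vLLM's ErrorResponse / ErrorInfo structure
    err = ErrorResponse(
        error=ErrorInfo(
            message=message,
            type=HTTPStatus.BAD_REQUEST.phrase,
            code=HTTPStatus.BAD_REQUEST,
        )
    )
    return JSONResponse(err.model_dump(), status_code=HTTPStatus.BAD_REQUEST)

With this handler registered, a request such as POST /echo with body {} fails validation on the missing text field and returns HTTP 400 carrying the serialized ErrorResponse payload, rather than FastAPI's default 422 response.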
