Skip to content

Commit c433e05

Browse files
committed
fix
1 parent 2c92f6f commit c433e05

File tree

3 files changed

+9
-16
lines changed

3 files changed

+9
-16
lines changed

fastdeploy/input/ernie4_5_processor.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -240,11 +240,9 @@ def process_request_dict(self, request, max_model_len=None):
240240
if self.reasoning_parser and self.reasoning_parser.__class__.__name__ == "ErnieX1ReasoningParser":
241241
request["enable_thinking"] = True
242242
if self.reasoning_parser:
243-
self.model_status_dict[request["request_id"]] = self.reasoning_parser.get_model_status(
244-
request["prompt_token_ids"]
245-
)
246-
if self.model_status_dict[request["request_id"]] == "think_start":
247-
request["enable_thinking"] = True
243+
model_status = self.reasoning_parser.get_model_status(request["prompt_token_ids"])
244+
self.model_status_dict[request["request_id"]] = model_status
245+
request["enable_thinking"] = model_status == "think_start"
248246
data_processor_logger.info(f"Processed request dict: {request}")
249247
return request
250248

fastdeploy/input/ernie4_5_vl_processor/ernie4_5_vl_processor.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -257,11 +257,9 @@ def process_request_dict(self, request, max_model_len=None):
257257
data_processor_logger.info(f"Processed request {request}")
258258

259259
if self.reasoning_parser:
260-
self.model_status_dict[request.request_id] = self.reasoning_parser.get_model_status(
261-
request.prompt_token_ids
262-
)
263-
if self.model_status_dict[request.request_id] == "think_start":
264-
request.enable_thinking = True
260+
model_status = self.reasoning_parser.get_model_status(request["prompt_token_ids"])
261+
self.model_status_dict[request["request_id"]] = model_status
262+
request["enable_thinking"] = model_status == "think_start"
265263

266264
return request
267265

fastdeploy/input/text_processor.py

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -347,11 +347,9 @@ def process_request_dict(self, request, max_model_len=None, **kwargs):
347347
if request.get("top_p") < _SAMPLING_EPS:
348348
request["top_p"] = _SAMPLING_EPS
349349
if self.reasoning_parser:
350-
self.model_status_dict[request["request_id"]] = self.reasoning_parser.get_model_status(
351-
request["prompt_token_ids"]
352-
)
353-
if self.model_status_dict[request["request_id"]] == "think_start":
354-
request["enable_thinking"] = True
350+
model_status = self.reasoning_parser.get_model_status(request["prompt_token_ids"])
351+
self.model_status_dict[request["request_id"]] = model_status
352+
request["enable_thinking"] = model_status == "think_start"
355353

356354
data_processor_logger.info(f"Processed request dict: {request}")
357355
return request
@@ -376,7 +374,6 @@ def process_response(self, response_dict, **kwargs):
376374
token_ids = token_ids[:-1]
377375
full_text = self.tokenizer.decode(token_ids)
378376
response_dict.outputs.text = full_text
379-
# 模型支持思考,并且支持思考
380377
if self.reasoning_parser:
381378
reasoning_content, text = self.reasoning_parser.extract_reasoning_content(
382379
full_text, response_dict, self.model_status_dict[req_id]

0 commit comments

Comments (0)