
Commit e1f14dd

Fix ruff/codex errors
Signed-off-by: Aleksandr Samarin <[email protected]>
1 parent 9936d60

vllm/entrypoints/openai/serving_chat.py

Lines changed: 13 additions & 11 deletions
@@ -6,7 +6,7 @@
 import time
 from collections.abc import AsyncGenerator, AsyncIterator
 from collections.abc import Sequence as GenericSequence
-from typing import Callable, Final, Optional, Union
+from typing import Any, Callable, Final, Optional, Union
 
 import jinja2
 import partial_json_parser
@@ -790,11 +790,10 @@ async def chat_completion_stream_generator(
 
         if self.use_harmony:
             # Group consecutive tokens with same channel/recipient
-            groups = []
+            groups: list[dict[str, str]] = []
             for channel, recipient, text in token_states:
-                if not text:
-                    continue
-                if groups and groups[-1]['channel'] == channel and groups[-1]['recipient'] == recipient:
+                if (groups and groups[-1]['channel'] == channel
+                        and groups[-1]['recipient'] == recipient):
                     groups[-1]['text'] += text
                 else:
                     groups.append({
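
As context for the hunk above, the grouping logic is a simple run-merge over the streamed token states: consecutive triples that share the same channel and recipient are folded into one accumulated entry. A minimal standalone sketch of the technique, assuming token_states yields (channel, recipient, text) string triples as the diff suggests:

def group_token_states(token_states: list[tuple[str, str, str]]) -> list[dict[str, str]]:
    # Merge consecutive (channel, recipient, text) triples that share the
    # same channel and recipient into a single accumulated text entry.
    groups: list[dict[str, str]] = []
    for channel, recipient, text in token_states:
        if (groups and groups[-1]['channel'] == channel
                and groups[-1]['recipient'] == recipient):
            groups[-1]['text'] += text
        else:
            groups.append({'channel': channel, 'recipient': recipient, 'text': text})
    return groups

# Two consecutive "analysis" tokens collapse into one group:
# group_token_states([('analysis', '', 'foo'), ('analysis', '', 'bar')])
# -> [{'channel': 'analysis', 'recipient': '', 'text': 'foobar'}]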
@@ -849,14 +848,16 @@ async def chat_completion_stream_generator(
                         index=next_tool_index,
                     ))
                     prev_recipient = group_recipient
-                    # Increment for any subsequent new tool calls in this chunk
+                    # Increment for subsequent new tool calls
                     next_tool_index += 1
 
                 if group_text:
                     # Stream arguments for the ongoing tool call
-                    # The current call index is next_tool_index - 1 if we just
-                    # opened it, OR base_index if continuing from prev chunk
-                    tool_call_index = next_tool_index - 1 if next_tool_index > base_index else base_index
+                    # Use next_tool_index - 1 if we opened a call
+                    # this chunk, else base_index for ongoing
+                    tool_call_index = (next_tool_index - 1
+                                       if next_tool_index > base_index
+                                       else base_index)
                     tool_messages.append(DeltaToolCall(
                         index=tool_call_index,
                         function=DeltaFunctionCall(
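
The parenthesized conditional above decides which tool call the streamed argument fragment is attached to. Isolated as a sketch (the meaning of next_tool_index and base_index is inferred from the diff's comments, not from the full source):

def current_tool_call_index(next_tool_index: int, base_index: int) -> int:
    # If the counter advanced past base_index, a new call was opened in this
    # chunk and the arguments belong to it (next_tool_index - 1); otherwise
    # they continue the call carried over from the previous chunk (base_index).
    return (next_tool_index - 1
            if next_tool_index > base_index
            else base_index)

# A call was opened this chunk: arguments go to the newly opened index.
assert current_tool_call_index(next_tool_index=3, base_index=1) == 2
# No new call this chunk: arguments continue the previous chunk's call.
assert current_tool_call_index(next_tool_index=1, base_index=1) == 1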
@@ -865,7 +866,7 @@
 
             # Combine all non-empty fields into a single message
             if combined_content or combined_reasoning or tool_messages:
-                delta_kwargs = {}
+                delta_kwargs: dict[str, Any] = {}
                 if combined_content:
                     delta_kwargs['content'] = combined_content
                 if combined_reasoning:
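
The dict[str, Any] annotation here is what the new Any import in the first hunk exists for: the kwargs mix string values (content, reasoning) with a list of tool-call deltas, so a dict[str, str] inference would be rejected by the type checker. A hedged sketch of the pattern, with the key names for reasoning and tool calls assumed rather than taken from the diff:

from typing import Any

def build_delta_kwargs(content: str, reasoning: str,
                       tool_calls: list[dict[str, Any]]) -> dict[str, Any]:
    # Collect only the non-empty fields; values mix str and list types,
    # which is why the dict needs an Any value annotation.
    delta_kwargs: dict[str, Any] = {}
    if content:
        delta_kwargs['content'] = content
    if reasoning:
        delta_kwargs['reasoning_content'] = reasoning  # assumed key name
    if tool_calls:
        delta_kwargs['tool_calls'] = tool_calls  # assumed key name
    return delta_kwargs

# build_delta_kwargs('hi', '', [{'index': 0}])
# -> {'content': 'hi', 'tool_calls': [{'index': 0}]}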
@@ -1117,7 +1118,8 @@ async def chat_completion_stream_generator(
                 if delta_message.content:
                     delta_content_parts.append(delta_message.content)
                 if delta_message.reasoning_content:
-                    delta_content_parts.append(f"[reasoning: {delta_message.reasoning_content}]")
+                    reasoning = delta_message.reasoning_content
+                    delta_content_parts.append(f"[reasoning: {reasoning}]")
                 if delta_message.tool_calls:
                     tool_args = "".join(
                         tc.function.arguments
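
This last hunk only splits an over-long f-string line, but it sits inside logic that flattens a streamed delta into debug-style text: plain content is appended as-is, reasoning is wrapped in a [reasoning: ...] marker, and tool-call argument fragments are joined. A small sketch of that flattening, with the function name and exact aggregation assumed:

def flatten_delta(content: str, reasoning: str, tool_call_args: list[str]) -> str:
    # Mirror the diff's accumulation: raw content, tagged reasoning text,
    # and concatenated tool-call argument fragments.
    parts: list[str] = []
    if content:
        parts.append(content)
    if reasoning:
        parts.append(f"[reasoning: {reasoning}]")
    if tool_call_args:
        parts.append("".join(tool_call_args))
    return "".join(parts)

# flatten_delta('', 'checking units', ['{"city": ', '"Paris"}'])
# -> '[reasoning: checking units]{"city": "Paris"}'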
