Commit d020c4b

If prompt/output token count is 0, don't set stats/constraints
1 parent 308840a commit d020c4b

File tree

1 file changed: +6 -8 lines changed

src/guidellm/request/session.py

Lines changed: 6 additions & 8 deletions
@@ -1,7 +1,9 @@
 import itertools
 from abc import ABC, abstractmethod
-from collections.abc import Sequence
-from typing import Generic
+from typing import TYPE_CHECKING, Generic
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
 
 from guidellm.backend.response import ResponseSummary
 from guidellm.config import settings
@@ -69,12 +71,8 @@ def get_next_request(self) -> GenerationRequest:
         return GenerationRequest(
             request_type=settings.preferred_route,
             content=content,
-            stats=(
-                {"prompt_tokens": prompt_tokens} if prompt_tokens is not None else {}
-            ),
-            constraints=(
-                {"output_tokens": output_tokens} if output_tokens is not None else {}
-            ),
+            stats=({"prompt_tokens": prompt_tokens} if prompt_tokens else {}),
+            constraints=({"output_tokens": output_tokens} if output_tokens else {}),
         )
 
     def get_next_delay(self) -> float:
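
To illustrate the behavioral change, here is a minimal standalone sketch (the helper functions below are hypothetical, not part of guidellm): the old "is not None" check still emitted an entry for a token count of 0, while the new truthiness check treats 0 like None and leaves the dict empty, which is what the commit title describes.

from typing import Optional

def stats_before(prompt_tokens: Optional[int]) -> dict:
    # Old expression: only None is filtered out, so a count of 0 still sets stats.
    return {"prompt_tokens": prompt_tokens} if prompt_tokens is not None else {}

def stats_after(prompt_tokens: Optional[int]) -> dict:
    # New expression: any falsy count (None or 0) leaves stats empty.
    return {"prompt_tokens": prompt_tokens} if prompt_tokens else {}

print(stats_before(0))     # {'prompt_tokens': 0}
print(stats_after(0))      # {}
print(stats_before(None))  # {}
print(stats_after(None))   # {}

The same reasoning applies to the constraints dict and its output_tokens key.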
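
The first hunk also moves the Sequence import under an if TYPE_CHECKING: guard, so it is only evaluated by static type checkers and skipped at runtime. A minimal sketch of that pattern, assuming the annotations that reference Sequence are either quoted or covered by from __future__ import annotations (the function below is illustrative, not taken from session.py):

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Only imported while a type checker (e.g. mypy) analyzes the module;
    # this block never runs at runtime.
    from collections.abc import Sequence

def first_token(tokens: "Sequence[int]") -> int:
    # The quoted annotation keeps Sequence out of runtime evaluation,
    # so the guarded import above is sufficient.
    return tokens[0]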
