1 parent 6c2cfb6 commit 21de6d4
tests/entrypoints/openai/test_chunked_prompt.py
@@ -116,7 +116,10 @@ async def test_chat_completion_stream_options_and_logprobs_with_long_prompts(
            assert chunk.choices[0].logprobs is None
            empty_chunks_received += 1
        else:
-           tokens_received += 1
+           # Count actual tokens from logprobs since multiple tokens
+           # can be batched into a single chunk
+           assert chunk.choices[0].logprobs and chunk.choices[0].logprobs.content
+           tokens_received += len(chunk.choices[0].logprobs.content)

        if chunk.choices[0].finish_reason is not None:
            finished = True
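For context, a minimal sketch of the counting logic this change relies on: when the server batches several generated tokens into one streamed chunk, `logprobs.content` carries one entry per token, so summing its length counts every token, whereas incrementing by one per chunk undercounts. The helper name `count_streamed_tokens` and the `chunks` argument below are hypothetical and assume OpenAI-style chat-completion stream chunks with logprobs enabled; they are not part of the test file.

```python
def count_streamed_tokens(chunks):
    """Sum generated tokens across streamed chat-completion chunks.

    Assumes OpenAI-style stream chunks with logprobs enabled.
    """
    tokens = 0
    for chunk in chunks:
        choice = chunk.choices[0]
        if choice.logprobs and choice.logprobs.content:
            # logprobs.content holds one entry per generated token, even
            # when several tokens are batched into a single chunk, so its
            # length is the per-chunk token count.
            tokens += len(choice.logprobs.content)
    return tokens
```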