1 parent 97a0130 commit 4f9ce35
tests/entrypoints/openai/test_chunked_prompt.py
@@ -66,8 +66,11 @@ async def test_completion_stream_options_and_logprobs_with_long_prompts(
             chunk.usage.prompt_tokens + chunk.usage.completion_tokens
         )
         if not finished:
-            tokens_received += 1
             assert chunk.choices[0].text
+            # Count actual tokens from logprobs since multiple tokens
+            # can be batched into a single chunk
+            assert chunk.choices[0].logprobs and chunk.choices[0].logprobs.tokens
+            tokens_received += len(chunk.choices[0].logprobs.tokens)

         if chunk.choices[0].finish_reason is not None:
             finished = True
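
For context, a minimal sketch of the pattern the fixed test relies on: when streaming completions from an OpenAI-compatible server (vLLM here), a single SSE chunk may carry several generated tokens, so counting chunks undercounts tokens; counting the entries in choices[0].logprobs.tokens gives the actual total. The base_url, api_key, model name, and prompt below are illustrative assumptions, not values taken from this commit.

# Sketch: count streamed tokens via logprobs instead of per-chunk.
import asyncio

import openai


async def count_streamed_tokens() -> int:
    # Assumed local OpenAI-compatible endpoint; adjust base_url/model as needed.
    client = openai.AsyncOpenAI(base_url="http://localhost:8000/v1",
                                api_key="EMPTY")
    stream = await client.completions.create(
        model="my-model",  # hypothetical model name
        prompt="What is the capital of France?" * 400,
        max_tokens=32,
        stream=True,
        logprobs=0,  # request logprobs so each chunk lists its tokens
        stream_options={"include_usage": True},
    )
    tokens_received = 0
    async for chunk in stream:
        # The final usage-only chunk has no choices when include_usage is set.
        choice = chunk.choices[0] if chunk.choices else None
        if choice is not None and choice.logprobs and choice.logprobs.tokens:
            # A chunk may batch multiple tokens; count them all.
            tokens_received += len(choice.logprobs.tokens)
    return tokens_received


if __name__ == "__main__":
    print(asyncio.run(count_streamed_tokens()))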