Skip to content

Commit dddaab2

Browse files
committed
fix token count for streaming Responses API
1 parent 3ad5e50 commit dddaab2

File tree

1 file changed

+9
-1
lines changed

1 file changed

+9
-1
lines changed

sentry_sdk/integrations/openai.py

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -126,8 +126,10 @@ def _calculate_token_usage(
126126
# Manually count tokens
127127
if input_tokens == 0:
128128
for message in messages:
129-
if "content" in message:
129+
if isinstance(message, dict) and "content" in message:
130130
input_tokens += count_tokens(message["content"])
131+
elif isinstance(message, str):
132+
input_tokens += count_tokens(message)
131133

132134
if output_tokens == 0:
133135
if streaming_message_responses is not None:
@@ -246,6 +248,7 @@ def new_iterator():
246248
# type: () -> Iterator[ChatCompletionChunk]
247249
with capture_internal_exceptions():
248250
for x in old_iterator:
251+
# OpenAI chat completion API
249252
if hasattr(x, "choices"):
250253
choice_index = 0
251254
for choice in x.choices:
@@ -257,6 +260,11 @@ def new_iterator():
257260
data_buf.append([])
258261
data_buf[choice_index].append(content or "")
259262
choice_index += 1
263+
# OpenAI responses API
264+
elif hasattr(x, "delta"):
265+
if len(data_buf) == 0:
266+
data_buf.append([])
267+
data_buf[0].append(x.delta or "")
260268
yield x
261269
if len(data_buf) > 0:
262270
all_responses = list(map(lambda chunk: "".join(chunk), data_buf))

0 commit comments

Comments (0)