Skip to content

Commit 67e54ed

Browse files
committed
fix: tokens could not be obtained from the model dialogue
1 parent 0d96f79 commit 67e54ed

File tree

1 file changed

+5
-5
lines changed

1 file changed

+5
-5
lines changed

apps/setting/models_provider/impl/base_chat_open_ai.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,8 @@ def _stream(
5252
run_manager: Optional[CallbackManagerForLLMRun] = None,
5353
**kwargs: Any,
5454
) -> Iterator[ChatGenerationChunk]:
55-
55+
kwargs["stream"] = True
56+
kwargs["stream_options"] = {"include_usage": True}
5657
"""Set default stream_options."""
5758
stream_usage = self._should_stream_usage(kwargs.get('stream_usage'), **kwargs)
5859
# Note: stream_options is not a valid parameter for Azure OpenAI.
@@ -63,7 +64,6 @@ def _stream(
6364
if stream_usage:
6465
kwargs["stream_options"] = {"include_usage": stream_usage}
6566

66-
kwargs["stream"] = True
6767
payload = self._get_request_payload(messages, stop=stop, **kwargs)
6868
default_chunk_class: Type[BaseMessageChunk] = AIMessageChunk
6969
base_generation_info = {}
@@ -107,9 +107,6 @@ def _stream(
107107
continue
108108

109109
# custom code
110-
if generation_chunk.message.usage_metadata is not None:
111-
self.usage_metadata = generation_chunk.message.usage_metadata
112-
# custom code
113110
if len(chunk['choices']) > 0 and 'reasoning_content' in chunk['choices'][0]['delta']:
114111
generation_chunk.message.additional_kwargs["reasoning_content"] = chunk['choices'][0]['delta'][
115112
'reasoning_content']
@@ -121,6 +118,9 @@ def _stream(
121118
generation_chunk.text, chunk=generation_chunk, logprobs=logprobs
122119
)
123120
is_first_chunk = False
121+
# custom code
122+
if generation_chunk.message.usage_metadata is not None:
123+
self.usage_metadata = generation_chunk.message.usage_metadata
124124
yield generation_chunk
125125

126126
def _create_chat_result(self,

0 commit comments

Comments (0)