Skip to content

Commit 296bc92

Browse files
authored
Feat: fix stream output and add openai stream (#276)
feat: add openai stream
1 parent d86b0b5 commit 296bc92

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

src/memos/mem_os/product.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1024,7 +1024,7 @@ def chat_with_references(
             elif self.config.chat_model.backend == "vllm":
                 response_stream = self.chat_llm.generate_stream(current_messages)
             else:
-                if self.config.chat_model.backend in ["huggingface", "vllm"]:
+                if self.config.chat_model.backend in ["huggingface", "vllm", "openai"]:
                     response_stream = self.chat_llm.generate_stream(current_messages)
                 else:
                     response_stream = self.chat_llm.generate(current_messages)
@@ -1041,7 +1041,7 @@ def chat_with_references(
         full_response = ""
         token_count = 0
         # Use tiktoken for proper token-based chunking
-        if self.config.chat_model.backend not in ["huggingface", "vllm"]:
+        if self.config.chat_model.backend not in ["huggingface", "vllm", "openai"]:
             # For non-huggingface backends, we need to collect the full response first
             full_response_text = ""
             for chunk in response_stream:

0 commit comments

Comments
 (0)