Skip to content

Commit bd3a9b2

Browse files
authored
fix: xinference-chat-stream-response (#991)
1 parent 18d3877 commit bd3a9b2

File tree

1 file changed

+6
-1
lines changed

1 file changed

+6
-1
lines changed

api/core/third_party/langchain/llms/xinference_llm.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -123,7 +123,12 @@ def _stream_generate(
123123
if choices:
124124
choice = choices[0]
125125
if isinstance(choice, dict):
126-
token = choice.get("text", "")
126+
if 'text' in choice:
127+
token = choice.get("text", "")
128+
elif 'delta' in choice and 'content' in choice['delta']:
129+
token = choice.get('delta').get('content')
130+
else:
131+
continue
127132
log_probs = choice.get("logprobs")
128133
if run_manager:
129134
run_manager.on_llm_new_token(

0 commit comments

Comments (0)