The issue I found is that when a streamed chunk carries tool calls, `delta.content` is `None`, which makes `_convert_delta_to_message_chunk()` fail. The subclass below works around this by setting the content to an empty string whenever the delta has `tool_calls`:

```python
from typing import Any, AsyncIterator, Iterator, List, Optional

from langchain_core.callbacks import (
    AsyncCallbackManagerForLLMRun,
    CallbackManagerForLLMRun,
)
from langchain_core.messages import AIMessageChunk, BaseMessage
from langchain_core.outputs import ChatGenerationChunk

# Private helpers from langchain_mistralai; import paths assume the
# langchain-mistralai version this workaround was written against.
from langchain_mistralai.chat_models import (
    ChatMistralAI,
    _convert_delta_to_message_chunk,
    acompletion_with_retry,
)


class FixedChatMistralAI(ChatMistralAI):
    def _stream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[ChatGenerationChunk]:
        message_dicts, params = self._create_message_dicts(messages, stop)
        params = {**params, **kwargs, "stream": True}
        default_chunk_class = AIMessageChunk
        for chunk in self.completion_with_retry(
            messages=message_dicts, run_manager=run_manager, **params
        ):
            if len(chunk.choices) == 0:
                continue
            delta = chunk.choices[0].delta
            if not delta.content:
                # This is what changes: a tool-call delta has no text
                # content, so set it to an empty string to prevent an
                # error in _convert_delta_to_message_chunk().
                if getattr(delta, "tool_calls", None):
                    delta.content = ""
                else:
                    continue
            chunk = _convert_delta_to_message_chunk(delta, default_chunk_class)
            default_chunk_class = chunk.__class__
            if run_manager:
                run_manager.on_llm_new_token(token=chunk.content, chunk=chunk)
            yield ChatGenerationChunk(message=chunk)

    async def _astream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> AsyncIterator[ChatGenerationChunk]:
        message_dicts, params = self._create_message_dicts(messages, stop)
        params = {**params, **kwargs, "stream": True}
        default_chunk_class = AIMessageChunk
        async for chunk in await acompletion_with_retry(
            self, messages=message_dicts, run_manager=run_manager, **params
        ):
            if len(chunk.choices) == 0:
                continue
            delta = chunk.choices[0].delta
            if not delta.content:
                # Same fix as in _stream() above.
                if getattr(delta, "tool_calls", None):
                    delta.content = ""
                else:
                    continue
            chunk = _convert_delta_to_message_chunk(delta, default_chunk_class)
            default_chunk_class = chunk.__class__
            if run_manager:
                await run_manager.on_llm_new_token(token=chunk.content, chunk=chunk)
            yield ChatGenerationChunk(message=chunk)
```
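A minimal usage sketch, assuming a `langchain_mistralai` version that supports `bind_tools` and a `MISTRAL_API_KEY` set in the environment (the `get_weather` tool is a made-up example):

```python
from langchain_core.tools import tool


@tool
def get_weather(city: str) -> str:
    """Return a (fake) weather report for a city."""
    return f"It is sunny in {city}."


# The patched class is a drop-in replacement for ChatMistralAI.
llm = FixedChatMistralAI(model="mistral-large-latest", temperature=0)
llm_with_tools = llm.bind_tools([get_weather])

# Streaming a prompt that triggers a tool call no longer raises, because
# the override above gives tool-call deltas an empty-string content.
for chunk in llm_with_tools.stream("What is the weather in Paris?"):
    print(chunk)
```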
Feature request
Just as with OpenAI agents, I want to be able to build an agent against Mistral-large, which is currently only available through the Mistral API. This is not possible today.
Motivation
Mistral-large seems capable of reasoning along the same lines as GPT-4. If you want to implement an agent with function calling against Mistral-large, you are currently blocked entirely by the lack of an agent implementation for the Mistral API.
Proposal (If applicable)
No response