Skip to content

Commit ee98a28

Browse files
committed
Mistral AI model added
1 parent f272c0d commit ee98a28

File tree

1 file changed

+66
-0
lines changed

1 file changed

+66
-0
lines changed

src/openagi/llms/mistral.py

Lines changed: 66 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
1+
from openagi.exception import OpenAGIException
2+
from openagi.llms.base import LLMBaseModel, LLMConfigModel
3+
from openagi.utils.yamlParse import read_from_env
4+
5+
import logging
6+
from typing import Any
7+
from langchain_core.messages import HumanMessage
8+
9+
# ChatMistralAI ships as an optional langchain extra; fail fast at import
# time with an actionable install hint instead of a bare ImportError later.
try:
    from langchain_mistralai import ChatMistralAI
except ImportError:
    raise OpenAGIException("Install langchain Mistral AI with cmd `pip install langchain_mistralai`")
13+
14+
15+
class MistralConfigModel(LLMConfigModel):
    """Configuration model for Mistral.

    Field validation and construction semantics come from the
    LLMConfigModel parent (presumably pydantic-based — confirm in base).
    """

    # API key for the Mistral service; no default, so it must be supplied.
    mistral_api_key: str
    # Chat model identifier passed through to ChatMistralAI.
    model_name: str = "mistral-large-latest"
    # Low default temperature favors more deterministic completions.
    temperature: float = 0.1
22+
class MistralModel(LLMBaseModel):
    """Mistral service implementation of the LLMBaseModel.

    This class implements the specific logic required to work with the
    Mistral service through langchain's ChatMistralAI wrapper.
    """

    # Holds a MistralConfigModel; typed Any to match the base-class field.
    config: Any

    def load(self):
        """Initializes the Mistral client from `self.config`.

        Returns:
            The configured ChatMistralAI instance (also stored on `self.llm`).
        """
        self.llm = ChatMistralAI(
            model=self.config.model_name,
            temperature=self.config.temperature,
            api_key=self.config.mistral_api_key,
        )
        return self.llm

    def run(self, input_text: str):
        """Runs the Mistral model with the provided input text.

        Args:
            input_text: The input text to process.

        Returns:
            The text content of the response from the Mistral service.

        Raises:
            ValueError: If the `llm` client could not be initialized.
        """
        logging.info(f"Running LLM - {self.__class__.__name__}")
        # Lazily initialize the client on first use.
        if not self.llm:
            self.load()
        if not self.llm:
            raise ValueError("`llm` attribute not set.")
        message = HumanMessage(content=input_text)
        # `invoke` is the supported entry point; calling the chat model
        # directly (`self.llm([message])`) uses the deprecated
        # `BaseChatModel.__call__` interface. Same AIMessage result.
        resp = self.llm.invoke([message])
        return resp.content

    @staticmethod
    def load_from_env_config() -> MistralConfigModel:
        """Loads the Mistral configuration from environment variables.

        Note: despite earlier docs mentioning YAML, the value comes from
        the MISTRAL_API_KEY environment variable via `read_from_env`.

        Returns:
            An instance of MistralConfigModel with loaded configurations.

        Raises:
            Propagates the exception raised by `read_from_env` when
            MISTRAL_API_KEY is unset (raise_exception=True).
        """
        return MistralConfigModel(
            mistral_api_key=read_from_env("MISTRAL_API_KEY", raise_exception=True),
        )

0 commit comments

Comments
 (0)