
Commit 75df49d

fix mistral examples
1 parent c6b10db commit 75df49d

File tree

4 files changed: +14 -13 lines changed

src/examples/mistral_example/complete.py

Lines changed: 2 additions & 4 deletions
@@ -1,13 +1,11 @@
-from dotenv import find_dotenv, load_dotenv
+import os
 from langtrace_python_sdk import with_langtrace_root_span
 from mistralai import Mistral

-_ = load_dotenv(find_dotenv())
-
 @with_langtrace_root_span("chat_complete")
 def chat_complete():
     model = "mistral-large-latest"
-    client = Mistral()
+    client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])
     chat_response = client.chat.complete(
         model= model,
         messages = [
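For context, a minimal sketch of what the updated synchronous example looks like end to end; the prompt text and the final print are illustrative assumptions, since the diff is truncated after `messages = [`:

import os

from langtrace_python_sdk import with_langtrace_root_span
from mistralai import Mistral


@with_langtrace_root_span("chat_complete")
def chat_complete():
    model = "mistral-large-latest"
    # The key is now read explicitly from the environment instead of a .env file.
    client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])
    chat_response = client.chat.complete(
        model=model,
        messages=[
            # Illustrative prompt; the real message body is truncated in this diff.
            {"role": "user", "content": "Say hello in one short sentence."},
        ],
    )
    print(chat_response.choices[0].message.content)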

src/examples/mistral_example/complete_async.py

Lines changed: 2 additions & 4 deletions
@@ -1,12 +1,10 @@
-from dotenv import find_dotenv, load_dotenv
+import os
 from langtrace_python_sdk import with_langtrace_root_span
 from mistralai import Mistral

-_ = load_dotenv(find_dotenv())
-
 @with_langtrace_root_span("chat_complete_async")
 async def complete_async():
-    client = Mistral()
+    client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])
     res = await client.chat.complete_async(model="mistral-small-latest", messages=[
         {
             "content": "Which locations should I visit when I travel to New york? Answer in one short sentence.",

src/examples/mistral_example/embeddings.py

Lines changed: 2 additions & 4 deletions
@@ -1,15 +1,13 @@
-from dotenv import find_dotenv, load_dotenv
+import os
 from langtrace_python_sdk import with_langtrace_root_span
 from mistralai import Mistral

-_ = load_dotenv(find_dotenv())
-

 @with_langtrace_root_span("create_embeddings")
 def embeddings_create():
     model = "mistral-embed"

-    client = Mistral()
+    client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])

     embeddings_batch_response = client.embeddings.create(
         model=model,
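Similarly, a hedged sketch of a complete embeddings call under the new pattern; the input texts and the `inputs` keyword are assumptions based on the v1 mistralai SDK, since the diff cuts off after `model=model,`:

import os

from mistralai import Mistral

client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])
model = "mistral-embed"

# Illustrative inputs; the actual texts are truncated in this diff, and the
# `inputs` keyword is assumed from the v1 mistralai SDK.
embeddings_batch_response = client.embeddings.create(
    model=model,
    inputs=["Embed this sentence.", "As well as this one."],
)
print(len(embeddings_batch_response.data))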

src/run_example.py

Lines changed: 8 additions & 1 deletion
@@ -16,7 +16,8 @@
     "ollama": False,
     "groq": False,
     "vertexai": False,
-    "gemini": True,
+    "gemini": False,
+    "mistral": True,
 }

 if ENABLED_EXAMPLES["anthropic"]:
@@ -103,3 +104,9 @@

     print(Fore.BLUE + "Running Gemini example" + Fore.RESET)
     GeminiRunner().run()
+
+if ENABLED_EXAMPLES["mistral"]:
+    from examples.mistral_example import MistralRunner
+
+    print(Fore.BLUE + "Running Mistral example" + Fore.RESET)
+    MistralRunner().run()
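Since the examples no longer call load_dotenv, MISTRAL_API_KEY must already be set in the process environment before src/run_example.py is invoked. A minimal guard one could add (not part of this commit) to fail fast when the key is missing:

import os
import sys

# Hypothetical guard: the Mistral examples now require the key in the process
# environment, so exit with a clear message instead of a KeyError at call time.
if not os.environ.get("MISTRAL_API_KEY"):
    sys.exit("MISTRAL_API_KEY is not set; export it before enabling the Mistral example.")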
