src/examples/mistral_example: 4 files changed, +45 -36 lines changed

New file, defining a MistralRunner entry point for the three examples below:

+import asyncio
+from examples.mistral_example.complete import chat_complete
+from examples.mistral_example.complete_async import complete_async
+from examples.mistral_example.embeddings import embeddings_create
+from langtrace_python_sdk import with_langtrace_root_span
+
+
+class MistralRunner:
+    @with_langtrace_root_span("Mistral")
+    def run(self):
+        chat_complete()
+        asyncio.run(complete_async())
+        embeddings_create()
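
For anyone trying this locally, here is a minimal sketch of driving the new runner. It assumes the package layout implied by the imports above (the class is importable from examples.mistral_example) and that the Langtrace and Mistral API keys are available via the environment or a .env file; the driver script itself is hypothetical, not part of this diff.

# hypothetical driver script, not part of this diff
from examples.mistral_example import MistralRunner

if __name__ == "__main__":
    # the decorator above groups all three example calls under one "Mistral" root span
    MistralRunner().run()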

src/examples/mistral_example/complete.py (file names for the three modules below are inferred from the runner's imports):

-import os
-from langtrace_python_sdk import langtrace
+from dotenv import find_dotenv, load_dotenv
+from langtrace_python_sdk import langtrace, with_langtrace_root_span
 from mistralai import Mistral
 
-langtrace.init(api_key=os.environ["LANGTRACE_API_KEY"])
+_ = load_dotenv(find_dotenv())
 
-api_key = os.environ["MISTRAL_API_KEY"]
-model = "mistral-large-latest"
+langtrace.init()
 
-client = Mistral(api_key=api_key)
-
-def main():
+@with_langtrace_root_span("chat_complete")
+def chat_complete():
+    model = "mistral-large-latest"
+    client = Mistral()
     chat_response = client.chat.complete(
         model=model,
         messages=[
@@ -20,7 +20,3 @@ def main():
         ]
     )
     print(chat_response.choices[0].message.content)
-
-
-if __name__ == "__main__":
-    main()
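
The same three changes repeat in each example: python-dotenv loads the keys, langtrace.init() is called without an explicit api_key, and Mistral() is constructed without one either, which presumably falls back to the MISTRAL_API_KEY variable loaded from .env. If you would rather not keep a .env file, a rough equivalent setup looks like this (a sketch; the variable names are exactly the ones the removed lines read directly):

import os

# placeholders only; set these however you manage secrets
os.environ["LANGTRACE_API_KEY"] = "<your-langtrace-api-key>"  # previously passed to langtrace.init() explicitly
os.environ["MISTRAL_API_KEY"] = "<your-mistral-api-key>"      # previously passed to Mistral(api_key=...) explicitly

from langtrace_python_sdk import langtrace
langtrace.init()  # no explicit key, as in the updated examples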

src/examples/mistral_example/complete_async.py:

-import asyncio
-from langtrace_python_sdk import langtrace
+from dotenv import find_dotenv, load_dotenv
+from langtrace_python_sdk import langtrace, with_langtrace_root_span
 from mistralai import Mistral
-import os
 
-langtrace.init(api_key=os.environ["LANGTRACE_API_KEY"])
+_ = load_dotenv(find_dotenv())
 
-async def main():
-    s = Mistral(
-        api_key=os.getenv("MISTRAL_API_KEY", ""),
-    )
-    res = await s.chat.complete_async(model="mistral-small-latest", messages=[
+langtrace.init()
+
+@with_langtrace_root_span("chat_complete_async")
+async def complete_async():
+    client = Mistral()
+    res = await client.chat.complete_async(model="mistral-small-latest", messages=[
         {
             "content": "Which locations should I visit when I travel to New york? Answer in one short sentence.",
             "role": "user",
@@ -18,7 +18,3 @@ async def main():
     if res is not None:
         # handle response
         print(res.choices[0].message.content)
-        pass
-
-asyncio.run(main())
-print("hello")
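
With the module-level asyncio.run(main()) call and the stray print("hello") removed, the coroutine is now driven only by MistralRunner above. Running it on its own would look roughly like this (hypothetical standalone driver, not part of the diff):

import asyncio

from examples.mistral_example.complete_async import complete_async

if __name__ == "__main__":
    asyncio.run(complete_async())  # mirrors what MistralRunner.run() does for this example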

src/examples/mistral_example/embeddings.py:

-import os
-from langtrace_python_sdk import langtrace
+from dotenv import find_dotenv, load_dotenv
+from langtrace_python_sdk import langtrace, with_langtrace_root_span
 from mistralai import Mistral
 
-langtrace.init(api_key=os.environ["LANGTRACE_API_KEY"])
+_ = load_dotenv(find_dotenv())
 
-api_key = os.environ["MISTRAL_API_KEY"]
-model = "mistral-embed"
+langtrace.init()
 
-client = Mistral(api_key=api_key)
 
-embeddings_batch_response = client.embeddings.create(
-    model=model,
-    inputs=["Embed this sentence.", "As well as this one."],
-)
+@with_langtrace_root_span("create_embeddings")
+def embeddings_create():
+    model = "mistral-embed"
 
-print(embeddings_batch_response.data[0].embedding)
+    client = Mistral()
+
+    embeddings_batch_response = client.embeddings.create(
+        model=model,
+        inputs=["Embed this sentence.", "As well as this one."],
+    )
+
+    print(embeddings_batch_response.data[0].embedding)
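
The example prints only the first vector. As a small follow-on illustration of using both returned embeddings, here is a sketch (not part of the diff) that repeats the same embeddings call and compares the two vectors; it assumes MISTRAL_API_KEY is set and that response.data preserves the input order.

import math

from mistralai import Mistral

client = Mistral()
response = client.embeddings.create(
    model="mistral-embed",
    inputs=["Embed this sentence.", "As well as this one."],
)

first, second = (item.embedding for item in response.data)

# cosine similarity in plain Python, to keep the sketch dependency-free
dot = sum(x * y for x, y in zip(first, second))
norm = math.sqrt(sum(x * x for x in first)) * math.sqrt(sum(x * x for x in second))
print(dot / norm)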