File tree: src/examples/mistral_example — 4 files changed, +67 −0 lines
"""Example: synchronous Mistral chat completion traced with Langtrace.

Requires the LANGTRACE_API_KEY and MISTRAL_API_KEY environment variables.
"""
import os

from langtrace_python_sdk import langtrace
from mistralai import Mistral

# Initialize tracing before any Mistral calls so they are captured.
langtrace.init(api_key=os.environ["LANGTRACE_API_KEY"])

api_key = os.environ["MISTRAL_API_KEY"]
model = "mistral-large-latest"

client = Mistral(api_key=api_key)


def main():
    """Send a single chat-completion request and print the model's reply."""
    chat_response = client.chat.complete(
        model=model,
        messages=[
            {
                "role": "user",
                "content": "I need 10 cocktail recipes with tequila other than the classics like margarita, tequila"
            },
        ]
    )
    print(chat_response.choices[0].message.content)


if __name__ == "__main__":
    main()
"""Example: asynchronous Mistral chat completion traced with Langtrace.

Requires the LANGTRACE_API_KEY environment variable; MISTRAL_API_KEY is
read with a default of "" (the SDK will reject the request if it is unset).
"""
import asyncio
import os

from langtrace_python_sdk import langtrace
from mistralai import Mistral

# Initialize tracing before any Mistral calls so they are captured.
langtrace.init(api_key=os.environ["LANGTRACE_API_KEY"])


async def main():
    """Send one async chat request and print the model's reply."""
    s = Mistral(
        api_key=os.getenv("MISTRAL_API_KEY", ""),
    )
    res = await s.chat.complete_async(model="mistral-small-latest", messages=[
        {
            "content": "Which locations should I visit when I travel to New york? Answer in one short sentence.",
            "role": "user",
        },
    ])
    # The call may return None; only index into a real response.
    if res is not None:
        print(res.choices[0].message.content)


if __name__ == "__main__":
    asyncio.run(main())
"""Example: Mistral text embeddings traced with Langtrace.

Requires the LANGTRACE_API_KEY and MISTRAL_API_KEY environment variables.
"""
import os

from langtrace_python_sdk import langtrace
from mistralai import Mistral

# Initialize tracing before any Mistral calls so they are captured.
langtrace.init(api_key=os.environ["LANGTRACE_API_KEY"])

api_key = os.environ["MISTRAL_API_KEY"]
model = "mistral-embed"

client = Mistral(api_key=api_key)

# Embed a small batch of sentences in one request.
embeddings_batch_response = client.embeddings.create(
    model=model,
    inputs=["Embed this sentence.", "As well as this one."],
)

print(embeddings_batch_response.data[0].embedding)
You can’t perform that action at this time.
0 commit comments