@@ -99,23 +99,28 @@ print(chat_response.choices[0].message.content)
 
 ```python
 import os
+
 from mistralai import Mistral, UserMessage
 
 api_key = os.environ["MISTRAL_API_KEY"]
 model = "mistral-large-latest"
 
 client = Mistral(api_key=api_key)
 
-messages = [
+messages = [
     {
         "role": "user",
         "content": "What is the best French cheese?",
     },
-]
+]
+# Or using the new message classes
+# messages = [
+#     UserMessage(content="What is the best French cheese?"),
+# ]
 
 chat_response = client.chat.complete(
-    model=model,
-    messages=messages,
+    model=model,
+    messages=messages,
 )
 
 print(chat_response.choices[0].message.content)
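To see the commented-out alternative in action, here is the call above assembled as a runnable script with the typed `UserMessage` class in place of the plain dict. This is a minimal sketch, assuming the 1.x `mistralai` package and a `MISTRAL_API_KEY` environment variable, exactly as in the diff:

```python
import os

from mistralai import Mistral, UserMessage

client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])

# Typed equivalent of the dict-based messages list shown in the diff above.
chat_response = client.chat.complete(
    model="mistral-large-latest",
    messages=[UserMessage(content="What is the best French cheese?")],
)

print(chat_response.choices[0].message.content)
```

Both forms should be interchangeable; the guide shows the dict and `UserMessage` variants side by side.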
@@ -146,6 +151,8 @@ for chunk in stream_response:
 ```
 **New:**
 ```python
+import os
+
 from mistralai import Mistral, UserMessage
 
 api_key = os.environ["MISTRAL_API_KEY"]
@@ -159,14 +166,19 @@ messages = [
         "content": "What is the best French cheese?",
     },
 ]
+# Or using the new message classes
+# messages = [
+#     UserMessage(content="What is the best French cheese?"),
+# ]
 
 stream_response = client.chat.stream(
-    model=model,
-    messages=messages,
+    model=model,
+    messages=messages,
 )
 
 for chunk in stream_response:
     print(chunk.data.choices[0].delta.content)
+
 ```
 
 ### Example 3: Async
@@ -194,23 +206,37 @@ async for chunk in async_response:
 
 **New:**
 ```python
-from mistralai import Mistral, UserMessage
-
-api_key = os.environ["MISTRAL_API_KEY"]
-model = "mistral-large-latest"
-
-client = Mistral(api_key=api_key)
+import asyncio
+import os
 
-messages = [
-    {
-        "role": "user",
-        "content": "What is the best French cheese?",
-    },
-]
+from mistralai import Mistral, UserMessage
 
-# With async
-async_response = await client.chat.stream_async(model=model, messages=messages)
 
-async for chunk in async_response:
-    print(chunk.data.choices[0].delta.content)
+async def main():
+    client = Mistral(
+        api_key=os.getenv("MISTRAL_API_KEY", ""),
+    )
+
+    messages = [
+        {
+            "role": "user",
+            "content": "What is the best French cheese?",
+        },
+    ]
+    # Or using the new message classes
+    # messages = [
+    #     UserMessage(
+    #         content="What is the best French cheese?",
+    #     ),
+    # ]
+    async_response = await client.chat.stream_async(
+        messages=messages,
+        model="mistral-large-latest",
+    )
+
+    async for chunk in async_response:
+        print(chunk.data.choices[0].delta.content)
+
+
+asyncio.run(main())
 ```
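For completeness, the migrated async example can likewise use the typed message class. The sketch below mirrors the new code in the hunk above, again assuming the 1.x `mistralai` package:

```python
import asyncio
import os

from mistralai import Mistral, UserMessage


async def main():
    client = Mistral(api_key=os.getenv("MISTRAL_API_KEY", ""))

    # Typed equivalent of the dict-based messages list above.
    messages = [UserMessage(content="What is the best French cheese?")]

    # stream_async yields chunks whose text is under .data.choices[0].delta.
    async_response = await client.chat.stream_async(
        messages=messages,
        model="mistral-large-latest",
    )

    async for chunk in async_response:
        print(chunk.data.choices[0].delta.content)


asyncio.run(main())
```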