@@ -25,9 +25,10 @@ import os
from azure.ai.inference import ChatCompletionsClient
from azure.identity import DefaultAzureCredential

- model = ChatCompletionsClient(
+ client = ChatCompletionsClient(
    endpoint="https://<resource>.services.ai.azure.com/models",
    credential=DefaultAzureCredential(),
+    credential_scopes=["https://cognitiveservices.azure.com/.default"],
    model="mistral-large-2407",
)
```
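For context, a minimal completion call with a client configured like this could look like the following sketch; the prompt content is illustrative only, and the client variable is the one created above.

```python
# Minimal usage sketch: the messages below are illustrative only.
from azure.ai.inference.models import SystemMessage, UserMessage

response = client.complete(
    messages=[
        SystemMessage(content="You are a helpful assistant."),
        UserMessage(content="How many languages are in the world?"),
    ],
)
print(response.choices[0].message.content)
```

Because `credential_scopes` is set on the client, tokens are requested with the Cognitive Services scope and no per-call header handling is needed.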
@@ -47,10 +48,13 @@ import ModelClient from "@azure-rest/ai-inference";
import { isUnexpected } from "@azure-rest/ai-inference";
import { DefaultAzureCredential } from "@azure/identity";

+ const clientOptions = { credentials: { scopes: ["https://cognitiveservices.azure.com/.default"] } };
+
const client = new ModelClient(
    "https://<resource>.services.ai.azure.com/models",
    new DefaultAzureCredential(),
-    "mistral-large-2407"
+    "mistral-large-2407",
+    clientOptions,
);
```
@@ -79,10 +83,16 @@ using Azure.AI.Inference;
Then, you can use the package to consume the model. The following example shows how to create a client to consume chat completions with Entra ID:
```csharp
+ var credential = new DefaultAzureCredential();
+ AzureAIInferenceClientOptions clientOptions = new AzureAIInferenceClientOptions();
+ BearerTokenAuthenticationPolicy tokenPolicy = new BearerTokenAuthenticationPolicy(credential, new string[] { "https://cognitiveservices.azure.com/.default" });
+ clientOptions.AddPolicy(tokenPolicy, HttpPipelinePosition.PerRetry);
+
ChatCompletionsClient client = new ChatCompletionsClient(
    new Uri("https://<resource>.services.ai.azure.com/models"),
-    new DefaultAzureCredential(includeInteractiveCredentials: true),
-    "mistral-large-2407"
+    credential,
+    "mistral-large-2407",
+    clientOptions
);
```
@@ -106,8 +116,9 @@ Add the package to your project:
Then, you can use the package to consume the model. The following example shows how to create a client to consume chat completions:
```java
+ TokenCredential defaultCredential = new DefaultAzureCredentialBuilder().build();
ChatCompletionsClient client = new ChatCompletionsClientBuilder()
-    .credential(new DefaultAzureCredential()))
+    .credential(defaultCredential)
    .endpoint("https://<resource>.services.ai.azure.com/models")
    .model("mistral-large-2407")
    .buildClient();
@@ -127,6 +138,8 @@ Authorization: Bearer <bearer-token>
Content-Type: application/json
```
+ Tokens have to be issued with the scope `https://cognitiveservices.azure.com/.default`.
+
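When calling the REST API from code rather than the CLI, one way to obtain a token with that scope is the `azure-identity` package. The sketch below is illustrative: it only acquires the token and builds the headers shown in the request above, with the endpoint and request body omitted.

```python
# Sketch: acquire a bearer token with the required Cognitive Services scope
# and place it in the Authorization header of the REST request shown above.
from azure.identity import DefaultAzureCredential

credential = DefaultAzureCredential()
token = credential.get_token("https://cognitiveservices.azure.com/.default")

headers = {
    "Authorization": f"Bearer {token.token}",
    "Content-Type": "application/json",
}
```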
For testing purposes, the easiest way to get a valid token for your user account is the Azure CLI. In a console, run the following command:
```azurecli