@@ -26,7 +26,6 @@ client = ChatCompletionsClient(
    endpoint="https://<resource>.services.ai.azure.com/models",
    credential=DefaultAzureCredential(),
    credential_scopes=["https://cognitiveservices.azure.com/.default"],
-    model="mistral-large-2407",
)
```

@@ -50,7 +49,6 @@ const clientOptions = { credentials: { "https://cognitiveservices.azure.com" } }
const client = new ModelClient(
    "https://<resource>.services.ai.azure.com/models",
    new DefaultAzureCredential(),
-    "mistral-large-2407",
    clientOptions,
);
```
@@ -80,15 +78,14 @@ using Azure.AI.Inference;
Then, you can use the package to consume the model. The following example shows how to create a client to consume chat completions with Entra ID:

```csharp
-var credential = new DefaultAzureCredential();
+TokenCredential credential = new DefaultAzureCredential();
AzureAIInferenceClientOptions clientOptions = new AzureAIInferenceClientOptions();
BearerTokenAuthenticationPolicy tokenPolicy = new BearerTokenAuthenticationPolicy(credential, new string[] { "https://cognitiveservices.azure.com/.default" });
clientOptions.AddPolicy(tokenPolicy, HttpPipelinePosition.PerRetry);

ChatCompletionsClient client = new ChatCompletionsClient(
    new Uri("https://<resource>.services.ai.azure.com/models"),
    credential,
-    "mistral-large-2407",
    clientOptions
);
```
@@ -117,7 +114,6 @@ TokenCredential defaultCredential = new DefaultAzureCredentialBuilder().build();
ChatCompletionsClient client = new ChatCompletionsClientBuilder()
    .credential(defaultCredential)
    .endpoint("https://<resource>.services.ai.azure.com/models")
-    .model("mistral-large-2407")
    .buildClient();
```

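These hunks drop the model name from client construction, which suggests the model is selected per request instead. Below is a minimal Python sketch of that pattern, reusing the endpoint and the `mistral-large-2407` name from the removed lines; the `model` keyword on `complete()` is an assumption based on the snippets above, not something this diff shows.

```python
# Sketch only: per-request model selection after removing `model` from the
# client constructor. Endpoint and model name are placeholders taken from
# the snippets above; the `model` keyword on complete() is assumed.
from azure.ai.inference import ChatCompletionsClient
from azure.ai.inference.models import SystemMessage, UserMessage
from azure.identity import DefaultAzureCredential

# Build the client without pinning it to a single model.
client = ChatCompletionsClient(
    endpoint="https://<resource>.services.ai.azure.com/models",
    credential=DefaultAzureCredential(),
    credential_scopes=["https://cognitiveservices.azure.com/.default"],
)

# Name the model on the call itself rather than on the client.
response = client.complete(
    messages=[
        SystemMessage(content="You are a helpful assistant."),
        UserMessage(content="Summarize what Entra ID authentication is in one sentence."),
    ],
    model="mistral-large-2407",
)
print(response.choices[0].message.content)
```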