|
| 1 | +--- |
| 2 | +title: Azure OpenAI C# support |
| 3 | +titleSuffix: Azure OpenAI Service |
| 4 | +description: Azure OpenAI C# support |
| 5 | +manager: nitinme |
| 6 | +ms.service: azure-ai-openai |
| 7 | +ms.topic: include |
| 8 | +ms.date: 11/18/2024 |
| 9 | +--- |
| 10 | + |
| 11 | + |
| 12 | +The Azure OpenAI client library for .NET is a companion to the [official OpenAI client library for .NET](https://github.com/openai/openai-dotnet). The Azure OpenAI library configures a client for use with Azure OpenAI and provides additional strongly typed extension support for request and response models specific to Azure OpenAI scenarios. |
| 13 | + |
| 14 | +### Stable release: |
| 15 | + |
| 16 | +[Source code](https://github.com/Azure/azure-sdk-for-net/blob/Azure.AI.OpenAI_2.0.0/sdk/openai/Azure.AI.OpenAI/src) | [Package (NuGet)](https://www.nuget.org/packages/Azure.AI.OpenAI) | [Package reference documentation](/dotnet/api/overview/azure/ai.openai-readme?view=azure-dotnet&preserve-view=true) | [API reference documentation](../../reference.md) | [Samples](https://github.com/Azure/azure-sdk-for-net/blob/Azure.AI.OpenAI_2.0.0/sdk/openai/Azure.AI.OpenAI/tests/Samples) |
| 17 | + |
| 18 | +### Preview release: |
| 19 | + |
| 20 | +The preview release will have access to the latest features. |
| 21 | + |
| 22 | +[Source code](https://github.com/Azure/azure-sdk-for-net/tree/Azure.AI.OpenAI_2.1.0-beta.2/sdk/openai/Azure.AI.OpenAI/src) | [Package (NuGet)](https://www.nuget.org/packages/Azure.AI.OpenAI/2.1.0-beta.2) | [API reference documentation](../../reference.md) | [Package reference documentation](/dotnet/api/overview/azure/ai.openai-readme?view=azure-dotnet-preview&preserve-view=true) | [Samples](https://github.com/Azure/azure-sdk-for-net/tree/Azure.AI.OpenAI_2.1.0-beta.2/sdk/openai/Azure.AI.OpenAI/tests/Samples) |
| 23 | + |
| 24 | + |
| 25 | +## Azure OpenAI API version support |
| 26 | + |
| 27 | +Unlike the Azure OpenAI client libraries for Python and JavaScript, the Azure OpenAI .NET package is limited to targeting a specific subset of the Azure OpenAI API versions. Generally each Azure OpenAI .NET package will unlock access to newer Azure OpenAI API release features. Having access to the latest API versions impacts feature availability. |
| 28 | + |
| 29 | +Version selection is controlled by the [`AzureOpenAIClientOptions.ServiceVersion`](/dotnet/api/azure.ai.openai.azureopenaiclientoptions.serviceversion?view=azure-dotnet&preserve-view=true) enum. |
| 30 | + |
| 31 | +The [stable release](/dotnet/api/azure.ai.openai.azureopenaiclientoptions.serviceversion?view=azure-dotnet&preserve-view=true) currently targets: |
| 32 | + |
| 33 | +`2024-06-01` |
| 34 | + |
| 35 | +The [preview release](/dotnet/api/azure.ai.openai.azureopenaiclientoptions.serviceversion?view=azure-dotnet-preview&preserve-view=true) can currently target: |
| 36 | + |
| 37 | +- `2024-06-01` |
| 38 | +- `2024-08-01-preview` |
| 39 | +- `2024-09-01-preview` |
| 40 | +- `2024-10-01-preview` |
| 41 | + |
| 42 | +## Installation |
| 43 | + |
| 44 | +```dotnetcli |
| 45 | +dotnet add package Azure.AI.OpenAI --prerelease |
| 46 | +``` |
| 47 | + |
| 48 | +The `Azure.AI.OpenAI` package builds on the [official OpenAI package](https://www.nuget.org/packages/OpenAI), which is included as a dependency. |
| 49 | + |
| 50 | +## Authentication |
| 51 | + |
| 52 | +To interact with Azure OpenAI or OpenAI, create an instance of [`AzureOpenAIClient`](/dotnet/api/azure.ai.openai.azureopenaiclient?view=azure-dotnet-preview&preserve-view=true) with one of the following approaches: |
| 53 | + |
| 54 | +# [Microsoft Entra ID](#tab/dotnet-secure) |
| 55 | + |
| 56 | +A secure, keyless authentication approach is to use Microsoft Entra ID (formerly Azure Active Directory) via the [Azure Identity library](/dotnet/api/overview/azure/identity-readme?view=azure-dotnet&preserve-view=true). To use the library: |
| 57 | + |
| 58 | +```dotnetcli |
| 59 | +dotnet add package Azure.Identity |
| 60 | +``` |
| 61 | + |
| 62 | +Use the desired credential type from the library. For example, [`DefaultAzureCredential`](/dotnet/api/azure.identity.defaultazurecredential?view=azure-dotnet&preserve-view=true): |
| 63 | + |
| 64 | +```csharp |
| 65 | +AzureOpenAIClient azureClient = new( |
| 66 | + new Uri("https://your-azure-openai-resource.com"), |
| 67 | + new DefaultAzureCredential()); |
| 68 | +ChatClient chatClient = azureClient.GetChatClient("my-gpt-4o-mini-deployment"); |
| 69 | +``` |
| 70 | + |
| 71 | +# [API Key](#tab/dotnet-key) |
| 72 | + |
| 73 | +```csharp |
| 74 | +string keyFromEnvironment = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY"); |
| 75 | + |
| 76 | +AzureOpenAIClient azureClient = new( |
| 77 | + new Uri("https://your-azure-openai-resource.com"), |
| 78 | + new ApiKeyCredential(keyFromEnvironment)); |
| 79 | +ChatClient chatClient = azureClient.GetChatClient("my-gpt-35-turbo-deployment"); |
| 80 | +``` |
| 81 | + |
| 82 | +--- |
| 83 | + |
| 84 | +## Audio |
| 85 | + |
| 86 | +[`AzureOpenAIClient.GetAudioClient`](/dotnet/api/azure.ai.openai.azureopenaiclient.getaudioclient?view=azure-dotnet-preview&preserve-view=true) |
| 87 | + |
| 88 | +### Transcription |
| 89 | + |
| 90 | +```csharp |
| 91 | +AzureOpenAIClient azureClient = new( |
| 92 | + new Uri("https://your-azure-openai-resource.com"), |
| 93 | + new DefaultAzureCredential()); |
| 94 | + |
| 95 | +AudioClient client = azureClient.GetAudioClient("whisper"); |
| 96 | + |
| 97 | +string audioFilePath = Path.Combine("Assets", "speech.mp3"); |
| 98 | + |
| 99 | +AudioTranscriptionOptions options = new() |
| 100 | +{ |
| 101 | + ResponseFormat = AudioTranscriptionFormat.Verbose, |
| 102 | + TimestampGranularities = AudioTimestampGranularities.Word | AudioTimestampGranularities.Segment, |
| 103 | +}; |
| 104 | + |
| 105 | +AudioTranscription transcription = client.TranscribeAudio(audioFilePath, options); |
| 106 | + |
| 107 | +Console.WriteLine("Transcription:"); |
| 108 | +Console.WriteLine($"{transcription.Text}"); |
| 109 | + |
| 110 | +Console.WriteLine(); |
| 111 | +Console.WriteLine($"Words:"); |
| 112 | +foreach (TranscribedWord word in transcription.Words) |
| 113 | +{ |
| 114 | + Console.WriteLine($" {word.Word,15} : {word.StartTime.TotalMilliseconds,5:0} - {word.EndTime.TotalMilliseconds,5:0}"); |
| 115 | +} |
| 116 | + |
| 117 | +Console.WriteLine(); |
| 118 | +Console.WriteLine($"Segments:"); |
| 119 | +foreach (TranscribedSegment segment in transcription.Segments) |
| 120 | +{ |
| 121 | + Console.WriteLine($" {segment.Text,90} : {segment.StartTime.TotalMilliseconds,5:0} - {segment.EndTime.TotalMilliseconds,5:0}"); |
| 122 | +} |
| 123 | +``` |
| 124 | + |
| 125 | +### Text to Speech (TTS) |
| 126 | + |
| 127 | +```csharp |
| 128 | +using Azure.AI.OpenAI; |
| 129 | +using Azure.Identity; |
| 130 | +using OpenAI.Audio; |
| 131 | + |
| 132 | +AzureOpenAIClient azureClient = new( |
| 133 | + new Uri("https://your-azure-openai-resource.com"), |
| 134 | + new DefaultAzureCredential()); |
| 135 | + |
| 136 | +AudioClient client = azureClient.GetAudioClient("tts-hd"); //Replace with your Azure OpenAI model deployment |
| 137 | + |
| 138 | +string input = "Testing, testing, 1, 2, 3"; |
| 139 | + |
| 140 | +BinaryData speech = client.GenerateSpeech(input, GeneratedSpeechVoice.Alloy); |
| 141 | + |
| 142 | +using FileStream stream = File.OpenWrite($"{Guid.NewGuid()}.mp3"); |
| 143 | +speech.ToStream().CopyTo(stream); |
| 144 | +``` |
| 145 | + |
| 146 | +## Chat |
| 147 | + |
| 148 | +[`AzureOpenAIClient.GetChatClient`](/dotnet/api/azure.ai.openai.azureopenaiclient.getchatclient?view=azure-dotnet-preview&preserve-view=true) |
| 149 | + |
| 150 | +```csharp |
| 151 | +AzureOpenAIClient azureClient = new( |
| 152 | + new Uri("https://your-azure-openai-resource.com"), |
| 153 | + new DefaultAzureCredential()); |
| 154 | +ChatClient chatClient = azureClient.GetChatClient("my-gpt-4o-deployment"); |
| 155 | + |
| 156 | +ChatCompletion completion = chatClient.CompleteChat( |
| 157 | + [ |
| 158 | + // System messages represent instructions or other guidance about how the assistant should behave |
| 159 | + new SystemChatMessage("You are a helpful assistant that talks like a pirate."), |
| 160 | + // User messages represent user input, whether historical or the most recent input |
| 161 | + new UserChatMessage("Hi, can you help me?"), |
| 162 | + // Assistant messages in a request represent conversation history for responses |
| 163 | + new AssistantChatMessage("Arrr! Of course, me hearty! What can I do for ye?"), |
| 164 | + new UserChatMessage("What's the best way to train a parrot?"), |
| 165 | + ]); |
| 166 | + |
| 167 | +Console.WriteLine($"{completion.Role}: {completion.Content[0].Text}"); |
| 168 | +``` |
| 169 | + |
| 170 | +### Stream chat messages |
| 171 | + |
| 172 | +Streaming chat completions use the `CompleteChatStreaming` and `CompleteChatStreamingAsync` methods, which return a `CollectionResult<StreamingChatCompletionUpdate>` or `AsyncCollectionResult<StreamingChatCompletionUpdate>` instead of a `ClientResult<ChatCompletion>`. |
| 173 | + |
| 174 | +These result collections can be iterated over using `foreach` or `await foreach`, with each update arriving as new data is available from the streamed response. |
| 175 | + |
| 176 | +```csharp |
| 177 | +AzureOpenAIClient azureClient = new( |
| 178 | + new Uri("https://your-azure-openai-resource.com"), |
| 179 | + new DefaultAzureCredential()); |
| 180 | +ChatClient chatClient = azureClient.GetChatClient("my-gpt-4o-deployment"); |
| 181 | + |
| 182 | +CollectionResult<StreamingChatCompletionUpdate> completionUpdates = chatClient.CompleteChatStreaming( |
| 183 | + [ |
| 184 | + new SystemChatMessage("You are a helpful assistant that talks like a pirate."), |
| 185 | + new UserChatMessage("Hi, can you help me?"), |
| 186 | + new AssistantChatMessage("Arrr! Of course, me hearty! What can I do for ye?"), |
| 187 | + new UserChatMessage("What's the best way to train a parrot?"), |
| 188 | + ]); |
| 189 | + |
| 190 | +foreach (StreamingChatCompletionUpdate completionUpdate in completionUpdates) |
| 191 | +{ |
| 192 | + foreach (ChatMessageContentPart contentPart in completionUpdate.ContentUpdate) |
| 193 | + { |
| 194 | + Console.Write(contentPart.Text); |
| 195 | + } |
| 196 | +} |
| 197 | +``` |
| 198 | + |
| 199 | +## Embeddings |
| 200 | + |
| 201 | +[`AzureOpenAIClient.GetEmbeddingClient`](/dotnet/api/azure.ai.openai.azureopenaiclient.getembeddingclient?view=azure-dotnet-preview&preserve-view=true) |
| 202 | + |
| 203 | +```csharp |
| 204 | +using Azure.AI.OpenAI; |
| 205 | +using Azure.Identity; |
| 206 | +using OpenAI.Embeddings; |
| 207 | + |
| 208 | +AzureOpenAIClient azureClient = new( |
| 209 | + new Uri("https://your-azure-openai-resource.com"), |
| 210 | + new DefaultAzureCredential()); |
| 211 | + |
| 212 | +EmbeddingClient client = azureClient.GetEmbeddingClient("text-embedding-3-large"); //Replace with your model deployment name |
| 213 | + |
| 214 | +string description = "This is a test embedding"; |
| 215 | + |
| 216 | +OpenAIEmbedding embedding = client.GenerateEmbedding(description); |
| 217 | +ReadOnlyMemory<float> vector = embedding.ToFloats(); |
| 218 | + |
| 219 | +Console.WriteLine(string.Join(", ", vector.ToArray())); |
| 220 | +``` |
| 221 | + |
| 222 | +## Fine-tuning |
| 223 | + |
| 224 | +Currently not supported with the Azure OpenAI .NET packages. |
| 225 | + |
| 226 | +## Batch |
| 227 | + |
| 228 | +Currently not supported with the Azure OpenAI .NET packages. |
| 229 | + |
| 230 | +## Images |
| 231 | + |
| 232 | +[`AzureOpenAIClient.GetImageClient`](/dotnet/api/azure.ai.openai.azureopenaiclient.getimageclient?view=azure-dotnet-preview&preserve-view=true) |
| 233 | + |
| 234 | +```csharp |
| 235 | +using Azure.AI.OpenAI; |
| 236 | +using Azure.Identity; |
| 237 | +using OpenAI.Images; |
| 238 | + |
| 239 | +AzureOpenAIClient azureClient = new( |
| 240 | + new Uri("https://your-azure-openai-resource.com"), |
| 241 | + new DefaultAzureCredential()); |
| 242 | + |
| 243 | +ImageClient client = azureClient.GetImageClient("dall-e-3"); // replace with your model deployment name. |
| 244 | + |
| 245 | +string prompt = "A rabbit eating pancakes."; |
| 246 | + |
| 247 | +ImageGenerationOptions options = new() |
| 248 | +{ |
| 249 | + Quality = GeneratedImageQuality.High, |
| 250 | + Size = GeneratedImageSize.W1792xH1024, |
| 251 | + Style = GeneratedImageStyle.Vivid, |
| 252 | + ResponseFormat = GeneratedImageFormat.Bytes |
| 253 | +}; |
| 254 | + |
| 255 | +GeneratedImage image = client.GenerateImage(prompt, options); |
| 256 | +BinaryData bytes = image.ImageBytes; |
| 257 | + |
| 258 | +using FileStream stream = File.OpenWrite($"{Guid.NewGuid()}.png"); |
| 259 | +bytes.ToStream().CopyTo(stream); |
| 260 | + |
| 261 | +``` |
| 262 | + |
| 263 | +- [C# DALL-E quickstart guide](/azure/ai-services/openai/dall-e-quickstart?tabs=dalle3%2Ccommand-line%2Cjavascript-keyless%2Ctypescript-keyless&pivots=programming-language-csharp) |
| 264 | + |
| 265 | + |
| 266 | +## Completions (legacy) |
| 267 | + |
| 268 | +Not supported with the Azure OpenAI .NET packages. |
| 269 | + |
| 270 | + |
| 271 | +## Error handling |
| 272 | + |
| 273 | +### Error codes |
| 274 | + |
| 275 | +| Status Code | Error Type | |
| 276 | +|----|---| |
| 277 | +| 400 | `Bad Request Error` | |
| 278 | +| 401 | `Authentication Error` | |
| 279 | +| 403 | `Permission Denied Error` | |
| 280 | +| 404 | `Not Found Error` | |
| 281 | +| 422 | `Unprocessable Entity Error` | |
| 282 | +| 429 | `Rate Limit Error` | |
| 283 | +| 500 | `Internal Server Error` | |
| 284 | +| 503 | `Service Unavailable` | |
| 285 | +| 504 | `Gateway Timeout` | |
| 286 | + |
| 287 | +### Retries |
| 288 | + |
| 289 | +The client classes will automatically retry the following errors up to three additional times using exponential backoff: |
| 290 | + |
| 291 | +- 408 Request Timeout |
| 292 | +- 429 Too Many Requests |
| 293 | +- 500 Internal Server Error |
| 294 | +- 502 Bad Gateway |
| 295 | +- 503 Service Unavailable |
| 296 | +- 504 Gateway Timeout |
| 297 | + |
| 298 | + |
0 commit comments