Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,13 @@
# Release History

## 2.2.0-beta.2 (Unreleased)

### Bugs fixed

- Fixed a bug that caused `CompleteChatStreaming*()` with audio-enabled models to fail unless provided a `ChatCompletionOptions` instance that had previously been used for a non-streaming `CompleteChat*()` call
- Addressed an issue that caused calls to `CompleteChatStreaming*()` to not report usage when provided a `ChatCompletionOptions` instance that was previously used in a non-streaming `CompleteChat*()` call
- Addressed issues with standalone serialization and deserialization of `ChatCompletionOptions` that impacted the ability to manipulate chat completion requests via `System.ClientModel.Primitives.ModelReaderWriter` and related utilities

## 2.2.0-beta.1 (2025-02-07)

### Features added
Expand Down
3,181 changes: 3,181 additions & 0 deletions api/OpenAI.net8.0.cs

Large diffs are not rendered by default.

136 changes: 12 additions & 124 deletions api/OpenAI.netstandard2.0.cs

Large diffs are not rendered by default.

17 changes: 13 additions & 4 deletions src/Custom/Chat/ChatClient.cs
Original file line number Diff line number Diff line change
Expand Up @@ -251,9 +251,18 @@ private void CreateChatCompletionOptions(IEnumerable<ChatMessage> messages, ref
{
options.Messages = messages.ToList();
options.Model = _model;
options.Stream = stream
? true
: null;
options.StreamOptions = stream ? options.StreamOptions : null;
if (stream)
{
options.Stream = true;
options.StreamOptions = s_includeUsageStreamOptions;
}
else
{
options.Stream = null;
options.StreamOptions = null;
}
}

private static readonly InternalChatCompletionStreamOptions s_includeUsageStreamOptions
= new(includeUsage: true, additionalBinaryDataProperties: null);
}
15 changes: 4 additions & 11 deletions src/Custom/Chat/ChatCompletionOptions.Serialization.cs
Original file line number Diff line number Diff line change
Expand Up @@ -13,19 +13,12 @@ public partial class ChatCompletionOptions
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void SerializeMessagesValue(Utf8JsonWriter writer, ModelReaderWriterOptions options)
{
if (Messages is not null)
{
writer.WriteStartArray();
foreach (var item in Messages)
{
writer.WriteObjectValue<ChatMessage>(item, options);
}
writer.WriteEndArray();
}
else
writer.WriteStartArray();
foreach (var item in Messages)
{
writer.WriteNullValue();
writer.WriteObjectValue<ChatMessage>(item, options);
}
writer.WriteEndArray();
}

// CUSTOM: Added custom serialization to treat a single string as a collection of strings with one item.
Expand Down
60 changes: 26 additions & 34 deletions src/Custom/Chat/ChatCompletionOptions.cs
Original file line number Diff line number Diff line change
@@ -1,14 +1,16 @@
using System;
using System.ClientModel.Primitives;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Threading;

namespace OpenAI.Chat;

/// <summary>
/// Request-level options for chat completion.
/// </summary>
[CodeGenModel("CreateChatCompletionRequest")]
[CodeGenSuppress("ChatCompletionOptions", typeof(IEnumerable<ChatMessage>), typeof(InternalCreateChatCompletionRequestModel))]
[CodeGenSuppress("ChatCompletionOptions", typeof(IEnumerable<ChatMessage>), typeof(InternalCreateChatCompletionRequestModel?))]
[CodeGenSerialization(nameof(Messages), SerializationValueHook = nameof(SerializeMessagesValue))]
[CodeGenSerialization(nameof(StopSequences), SerializationValueHook = nameof(SerializeStopSequencesValue), DeserializationValueHook = nameof(DeserializeStopSequencesValue))]
[CodeGenSerialization(nameof(LogitBiases), SerializationValueHook = nameof(SerializeLogitBiasesValue), DeserializationValueHook = nameof(DeserializeLogitBiasesValue))]
Expand All @@ -32,7 +34,7 @@ public partial class ChatCompletionOptions
/// ID of the model to use. See the <see href="https://platform.openai.com/docs/models/model-endpoint-compatibility">model endpoint compatibility</see> table for details on which models work with the Chat API.
/// </summary>
[CodeGenMember("Model")]
internal InternalCreateChatCompletionRequestModel Model { get; set; }
internal InternalCreateChatCompletionRequestModel? Model { get; set; }

// CUSTOM: Made internal. We only ever request a single choice.
/// <summary> How many chat completion choices to generate for each input message. Note that you will be charged based on the number of generated tokens across all of the choices. Keep `n` as `1` to minimize costs. </summary>
Expand All @@ -47,16 +49,18 @@ public partial class ChatCompletionOptions
/// <summary> Gets or sets the stream options. </summary>
[CodeGenMember("StreamOptions")]
internal InternalChatCompletionStreamOptions StreamOptions { get; set; }
= new() { IncludeUsage = true };

// CUSTOM: Made public now that there are no required properties.
/// <summary> Initializes a new instance of <see cref="ChatCompletionOptions"/> for deserialization. </summary>
public ChatCompletionOptions()
{
Messages = new ChangeTrackingList<ChatMessage>();
LogitBiases = new ChangeTrackingDictionary<int, int>();
StopSequences = new ChangeTrackingList<string>();
Tools = new ChangeTrackingList<ChatTool>();
Functions = new ChangeTrackingList<ChatFunction>();
InternalModalities = new ChangeTrackingList<InternalCreateChatCompletionRequestModality>();
Metadata = new ChangeTrackingDictionary<string, string>();
}

// CUSTOM: Renamed.
Expand Down Expand Up @@ -113,20 +117,6 @@ public ChatCompletionOptions()
[CodeGenMember("ParallelToolCalls")]
public bool? AllowParallelToolCalls { get; set; }

/// <summary>
/// An object specifying the format that the model must output.
/// </summary>
/// <remarks>
/// <p>
/// Compatible with GPT-4o, GPT-4o mini, GPT-4 Turbo and all GPT-3.5 Turbo models newer than gpt-3.5-turbo-1106.
/// </p>
/// <p>
/// Learn more in the Structured Outputs guide.
/// </p>
/// </remarks>
//[CodeGenMember("ResponseFormat")]
//public ChatResponseFormat ResponseFormat { get; set; }

[CodeGenMember("ServiceTier")]
internal InternalCreateChatCompletionRequestServiceTier? _serviceTier;

Expand Down Expand Up @@ -166,7 +156,7 @@ public ChatCompletionOptions()
/// <see href="https://platform.openai.com/chat-completions">OpenAI Platform dashboard</see>.
/// </summary>
[CodeGenMember("Metadata")]
public IDictionary<string, string> Metadata { get; } = new ChangeTrackingDictionary<string, string>();
public IDictionary<string, string> Metadata { get; }

// CUSTOM: Renamed.
/// <summary>
Expand All @@ -188,7 +178,16 @@ public ChatCompletionOptions()

// CUSTOM: Made internal for automatic enablement via audio options.
[CodeGenMember("Modalities")]
private IList<InternalCreateChatCompletionRequestModality> _internalModalities = new ChangeTrackingList<InternalCreateChatCompletionRequestModality>();
private IList<InternalCreateChatCompletionRequestModality> InternalModalities
{
get => _internalModalities;
set
{
_internalModalities = value;
_responseModalities = ChatResponseModalitiesExtensions.FromInternalModalities(value);
}
}
private IList<InternalCreateChatCompletionRequestModality> _internalModalities;

/// <summary>
/// Specifies the content types that the model should generate in its responses.
Expand All @@ -200,25 +199,18 @@ public ChatCompletionOptions()
/// </remarks>
public ChatResponseModalities ResponseModalities
{
get => ChatResponseModalitiesExtensions.FromInternalModalities(_internalModalities);
set => _internalModalities = value.ToInternalModalities();
}

// CUSTOM: supplemented with custom setter to internally enable audio output via modalities.
[CodeGenMember("Audio")]
private ChatAudioOptions _audioOptions;

public ChatAudioOptions AudioOptions
{
get => _audioOptions;
get => _responseModalities;
set
{
_audioOptions = value;
_internalModalities = value is null
? new ChangeTrackingList<InternalCreateChatCompletionRequestModality>()
: [InternalCreateChatCompletionRequestModality.Text, InternalCreateChatCompletionRequestModality.Audio];
_responseModalities = value;
_internalModalities = value.ToInternalModalities();
}
}
private ChatResponseModalities _responseModalities;

// CUSTOM: Renamed.
[CodeGenMember("Audio")]
public ChatAudioOptions AudioOptions { get; set; }

// CUSTOM: rename.
[CodeGenMember("Prediction")]
Expand Down
122 changes: 109 additions & 13 deletions src/Custom/Chat/OpenAIChatModelFactory.cs
Original file line number Diff line number Diff line change
@@ -1,12 +1,46 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;

namespace OpenAI.Chat;

/// <summary> Model factory for models. </summary>
public static partial class OpenAIChatModelFactory
{
/// <summary> Initializes a new instance of <see cref="OpenAI.Chat.ChatCompletion"/>. </summary>
/// <returns> A new <see cref="OpenAI.Chat.ChatCompletion"/> instance for mocking. </returns>
[EditorBrowsable(EditorBrowsableState.Never)]
public static ChatCompletion ChatCompletion(
string id,
ChatFinishReason finishReason,
ChatMessageContent content,
string refusal,
IEnumerable<ChatToolCall> toolCalls,
ChatMessageRole role,
ChatFunctionCall functionCall,
IEnumerable<ChatTokenLogProbabilityDetails> contentTokenLogProbabilities,
IEnumerable<ChatTokenLogProbabilityDetails> refusalTokenLogProbabilities,
DateTimeOffset createdAt,
string model,
string systemFingerprint,
ChatTokenUsage usage) =>
ChatCompletion(
id: id,
finishReason: finishReason,
content:content,
refusal: refusal,
toolCalls: toolCalls,
role: role,
functionCall: functionCall,
contentTokenLogProbabilities: contentTokenLogProbabilities,
refusalTokenLogProbabilities: refusalTokenLogProbabilities,
createdAt: createdAt,
model: model,
systemFingerprint: systemFingerprint,
usage: usage,
outputAudio: default);

/// <summary> Initializes a new instance of <see cref="OpenAI.Chat.ChatCompletion"/>. </summary>
/// <returns> A new <see cref="OpenAI.Chat.ChatCompletion"/> instance for mocking. </returns>
public static ChatCompletion ChatCompletion(
Expand All @@ -16,14 +50,14 @@ public static ChatCompletion ChatCompletion(
string refusal = null,
IEnumerable<ChatToolCall> toolCalls = null,
ChatMessageRole role = default,
ChatFunctionCall functionCall = null,
ChatFunctionCall functionCall = default,
IEnumerable<ChatTokenLogProbabilityDetails> contentTokenLogProbabilities = null,
IEnumerable<ChatTokenLogProbabilityDetails> refusalTokenLogProbabilities = null,
DateTimeOffset createdAt = default,
string model = null,
string systemFingerprint = null,
ChatTokenUsage usage = null,
ChatOutputAudio outputAudio = null)
ChatTokenUsage usage = default,
ChatOutputAudio outputAudio = default)
{
content ??= new ChatMessageContent();
toolCalls ??= new List<ChatToolCall>();
Expand Down Expand Up @@ -91,16 +125,27 @@ public static ChatTokenTopLogProbabilityDetails ChatTokenTopLogProbabilityDetail
additionalBinaryDataProperties: null);
}

/// <summary> Initializes a new instance of <see cref="OpenAI.Chat.ChatTokenUsage"/>. </summary>
/// <returns> A new <see cref="OpenAI.Chat.ChatTokenUsage"/> instance for mocking. </returns>
[EditorBrowsable(EditorBrowsableState.Never)]
public static ChatTokenUsage ChatTokenUsage(int outputTokenCount, int inputTokenCount, int totalTokenCount, ChatOutputTokenUsageDetails outputTokenDetails) =>
ChatTokenUsage(
outputTokenCount: outputTokenCount,
inputTokenCount: inputTokenCount,
totalTokenCount: totalTokenCount,
outputTokenDetails: outputTokenDetails,
inputTokenDetails: default);

/// <summary> Initializes a new instance of <see cref="OpenAI.Chat.ChatTokenUsage"/>. </summary>
/// <returns> A new <see cref="OpenAI.Chat.ChatTokenUsage"/> instance for mocking. </returns>
public static ChatTokenUsage ChatTokenUsage(int outputTokenCount = default, int inputTokenCount = default, int totalTokenCount = default, ChatOutputTokenUsageDetails outputTokenDetails = null, ChatInputTokenUsageDetails inputTokenDetails = null)
{
return new ChatTokenUsage(
outputTokenCount,
inputTokenCount,
totalTokenCount,
outputTokenDetails,
inputTokenDetails,
outputTokenCount: outputTokenCount,
inputTokenCount: inputTokenCount,
totalTokenCount: totalTokenCount,
outputTokenDetails: outputTokenDetails,
inputTokenDetails: inputTokenDetails,
additionalBinaryDataProperties: null);
}

Expand All @@ -116,7 +161,25 @@ public static ChatInputTokenUsageDetails ChatInputTokenUsageDetails(int audioTok

/// <summary> Initializes a new instance of <see cref="OpenAI.Chat.ChatOutputTokenUsageDetails"/>. </summary>
/// <returns> A new <see cref="OpenAI.Chat.ChatOutputTokenusageDetails"/> instance for mocking. </returns>
public static ChatOutputTokenUsageDetails ChatOutputTokenUsageDetails(int reasoningTokenCount = default, int audioTokenCount = default, int acceptedPredictionTokenCount = default, int rejectedPredictionTokenCount = 0)
[EditorBrowsable(EditorBrowsableState.Never)]
public static ChatOutputTokenUsageDetails ChatOutputTokenUsageDetails(int reasoningTokenCount) =>
ChatOutputTokenUsageDetails(
reasoningTokenCount: reasoningTokenCount,
audioTokenCount: default);

/// <summary> Initializes a new instance of <see cref="OpenAI.Chat.ChatOutputTokenUsageDetails"/>. </summary>
/// <returns> A new <see cref="OpenAI.Chat.ChatOutputTokenusageDetails"/> instance for mocking. </returns>
[EditorBrowsable(EditorBrowsableState.Never)]
public static ChatOutputTokenUsageDetails ChatOutputTokenUsageDetails(int reasoningTokenCount, int audioTokenCount) =>
ChatOutputTokenUsageDetails(
reasoningTokenCount: reasoningTokenCount,
audioTokenCount: audioTokenCount,
acceptedPredictionTokenCount: default,
rejectedPredictionTokenCount: default);

/// <summary> Initializes a new instance of <see cref="OpenAI.Chat.ChatOutputTokenUsageDetails"/>. </summary>
/// <returns> A new <see cref="OpenAI.Chat.ChatOutputTokenusageDetails"/> instance for mocking. </returns>
public static ChatOutputTokenUsageDetails ChatOutputTokenUsageDetails(int reasoningTokenCount = default, int audioTokenCount = default, int acceptedPredictionTokenCount = default, int rejectedPredictionTokenCount = default)
{
return new ChatOutputTokenUsageDetails(
audioTokenCount: audioTokenCount,
Expand All @@ -136,23 +199,56 @@ public static ChatOutputAudio ChatOutputAudio(BinaryData audioBytes, string id =
additionalBinaryDataProperties: null);
}

/// <summary> Initializes a new instance of <see cref="OpenAI.Chat.StreamingChatCompletionUpdate"/>. </summary>
/// <returns> A new <see cref="OpenAI.Chat.StreamingChatCompletionUpdate"/> instance for mocking. </returns>
[EditorBrowsable(EditorBrowsableState.Never)]
public static StreamingChatCompletionUpdate StreamingChatCompletionUpdate(
string completionId,
ChatMessageContent contentUpdate,
StreamingChatFunctionCallUpdate functionCallUpdate,
IEnumerable<StreamingChatToolCallUpdate> toolCallUpdates,
ChatMessageRole? role,
string refusalUpdate,
IEnumerable<ChatTokenLogProbabilityDetails> contentTokenLogProbabilities,
IEnumerable<ChatTokenLogProbabilityDetails> refusalTokenLogProbabilities,
ChatFinishReason? finishReason,
DateTimeOffset createdAt,
string model,
string systemFingerprint,
ChatTokenUsage usage) =>
StreamingChatCompletionUpdate(
completionId: completionId,
contentUpdate: contentUpdate,
functionCallUpdate: functionCallUpdate,
toolCallUpdates: toolCallUpdates,
role: role,
refusalUpdate: refusalUpdate,
contentTokenLogProbabilities: contentTokenLogProbabilities,
refusalTokenLogProbabilities: refusalTokenLogProbabilities,
finishReason: finishReason,
createdAt: createdAt,
model: model,
systemFingerprint: systemFingerprint,
usage: usage,
outputAudioUpdate: default);

/// <summary> Initializes a new instance of <see cref="OpenAI.Chat.StreamingChatCompletionUpdate"/>. </summary>
/// <returns> A new <see cref="OpenAI.Chat.StreamingChatCompletionUpdate"/> instance for mocking. </returns>
public static StreamingChatCompletionUpdate StreamingChatCompletionUpdate(
string completionId = null,
ChatMessageContent contentUpdate = null,
StreamingChatFunctionCallUpdate functionCallUpdate = null,
IEnumerable<StreamingChatToolCallUpdate> toolCallUpdates = null,
ChatMessageRole? role = null,
ChatMessageRole? role = default,
string refusalUpdate = null,
IEnumerable<ChatTokenLogProbabilityDetails> contentTokenLogProbabilities = null,
IEnumerable<ChatTokenLogProbabilityDetails> refusalTokenLogProbabilities = null,
ChatFinishReason? finishReason = null,
ChatFinishReason? finishReason = default,
DateTimeOffset createdAt = default,
string model = null,
string systemFingerprint = null,
ChatTokenUsage usage = null,
StreamingChatOutputAudioUpdate outputAudioUpdate = null)
ChatTokenUsage usage = default,
StreamingChatOutputAudioUpdate outputAudioUpdate = default)
{
contentUpdate ??= new ChatMessageContent();
toolCallUpdates ??= new List<StreamingChatToolCallUpdate>();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ public partial class StreamingChatCompletionUpdate
private IReadOnlyList<StreamingChatToolCallUpdate> _toolCallUpdates;
private IReadOnlyList<ChatTokenLogProbabilityDetails> _contentTokenLogProbabilities;
private IReadOnlyList<ChatTokenLogProbabilityDetails> _refusalTokenLogProbabilities;
internal InternalCreateChatCompletionStreamResponseChoice InternalChoice => (Choices.Count > 0) ? Choices[0] : null;
internal InternalCreateChatCompletionStreamResponseChoice InternalChoice => (Choices?.Count > 0) ? Choices[0] : null;
internal InternalChatCompletionStreamResponseDelta InternalChoiceDelta => InternalChoice?.Delta;

// CUSTOM:
Expand Down
Loading