diff --git a/api/OpenAI.net8.0.cs b/api/OpenAI.net8.0.cs
index 57bff3c5d..fb5b939d0 100644
--- a/api/OpenAI.net8.0.cs
+++ b/api/OpenAI.net8.0.cs
@@ -1056,6 +1056,7 @@ public class AudioClient {
     public AudioClient(string model, ApiKeyCredential credential, OpenAIClientOptions options);
     public AudioClient(string model, ApiKeyCredential credential);
     public AudioClient(string model, string apiKey);
+    public string Model { get; }
     public ClientPipeline Pipeline { get; }
     public virtual ClientResult GenerateSpeech(BinaryContent content, RequestOptions options = null);
     public virtual ClientResult<BinaryData> GenerateSpeech(string text, GeneratedSpeechVoice voice, SpeechGenerationOptions options = null, CancellationToken cancellationToken = default);
@@ -1403,6 +1404,7 @@ public class ChatClient {
     public ChatClient(string model, ApiKeyCredential credential, OpenAIClientOptions options);
     public ChatClient(string model, ApiKeyCredential credential);
     public ChatClient(string model, string apiKey);
+    public string Model { get; }
     public ClientPipeline Pipeline { get; }
     public virtual ClientResult<ChatCompletion> CompleteChat(params ChatMessage[] messages);
     public virtual ClientResult CompleteChat(BinaryContent content, RequestOptions options = null);
@@ -2242,6 +2244,7 @@ public class EmbeddingClient {
     public EmbeddingClient(string model, ApiKeyCredential credential, OpenAIClientOptions options);
     public EmbeddingClient(string model, ApiKeyCredential credential);
     public EmbeddingClient(string model, string apiKey);
+    public string Model { get; }
     public ClientPipeline Pipeline { get; }
     public virtual ClientResult<OpenAIEmbedding> GenerateEmbedding(string input, EmbeddingGenerationOptions options = null, CancellationToken cancellationToken = default);
     public virtual Task<ClientResult<OpenAIEmbedding>> GenerateEmbeddingAsync(string input, EmbeddingGenerationOptions options = null, CancellationToken cancellationToken = default);
@@ -3204,6 +3207,7 @@ public class ImageClient {
     public ImageClient(string model, ApiKeyCredential credential, OpenAIClientOptions options);
     public ImageClient(string model, ApiKeyCredential credential);
     public ImageClient(string model, string apiKey);
+    public string Model { get; }
     public ClientPipeline Pipeline { get; }
     public virtual ClientResult<GeneratedImage> GenerateImage(string prompt, ImageGenerationOptions options = null, CancellationToken cancellationToken = default);
     public virtual Task<ClientResult<GeneratedImage>> GenerateImageAsync(string prompt, ImageGenerationOptions options = null, CancellationToken cancellationToken = default);
@@ -3390,6 +3394,7 @@ public class ModerationClient {
     public ModerationClient(string model, ApiKeyCredential credential, OpenAIClientOptions options);
     public ModerationClient(string model, ApiKeyCredential credential);
     public ModerationClient(string model, string apiKey);
+    public string Model { get; }
     public ClientPipeline Pipeline { get; }
     public virtual ClientResult ClassifyText(BinaryContent content, RequestOptions options = null);
     public virtual ClientResult<ModerationResultCollection> ClassifyText(IEnumerable<string> inputs, CancellationToken cancellationToken = default);
@@ -4570,6 +4575,13 @@ public class OpenAIResponseClient {
     public virtual CollectionResult<StreamingResponseUpdate> GetResponseStreaming(string responseId, int? startingAfter = null, CancellationToken cancellationToken = default);
     public virtual AsyncCollectionResult<StreamingResponseUpdate> GetResponseStreamingAsync(string responseId, int? startingAfter = null, CancellationToken cancellationToken = default);
 }
+public static class OpenAIResponsesModelFactory {
+    public static MessageResponseItem MessageResponseItem(string id = null, MessageRole role = MessageRole.Assistant, MessageStatus? status = null);
+    public static OpenAIResponse OpenAIResponse(string id = null, DateTimeOffset createdAt = default, ResponseStatus? status = null, ResponseError error = null, ResponseTokenUsage usage = null, string endUserId = null, ResponseReasoningOptions reasoningOptions = null, int? maxOutputTokenCount = null, ResponseTextOptions textOptions = null, ResponseTruncationMode? truncationMode = null, ResponseIncompleteStatusDetails incompleteStatusDetails = null, IEnumerable<ResponseItem> outputItems = null, bool parallelToolCallsEnabled = false, ResponseToolChoice toolChoice = null, string model = null, IDictionary<string, string> metadata = null, float? temperature = null, float? topP = null, string previousResponseId = null, bool? background = null, string instructions = null, IEnumerable<ResponseTool> tools = null);
+    public static ReasoningResponseItem ReasoningResponseItem(string id = null, string encryptedContent = null, ReasoningStatus? status = null, IEnumerable<ReasoningSummaryPart> summaryParts = null);
+    public static ReasoningResponseItem ReasoningResponseItem(string id = null, string encryptedContent = null, ReasoningStatus? status = null, string summaryText = null);
+    public static ReferenceResponseItem ReferenceResponseItem(string id = null);
+}
 [Experimental("OPENAI001")]
 public class ReasoningResponseItem : ResponseItem, IJsonModel<ReasoningResponseItem>, IPersistableModel<ReasoningResponseItem> {
     public ReasoningResponseItem(IEnumerable<ReasoningSummaryPart> summaryParts);
diff --git a/api/OpenAI.netstandard2.0.cs b/api/OpenAI.netstandard2.0.cs
index 10c9b626b..c9f597698 100644
--- a/api/OpenAI.netstandard2.0.cs
+++ b/api/OpenAI.netstandard2.0.cs
@@ -967,6 +967,7 @@ public class AudioClient {
     public AudioClient(string model, ApiKeyCredential credential, OpenAIClientOptions options);
     public AudioClient(string model, ApiKeyCredential credential);
     public AudioClient(string model, string apiKey);
+    public string Model { get; }
     public ClientPipeline Pipeline { get; }
     public virtual ClientResult GenerateSpeech(BinaryContent content, RequestOptions options = null);
     public virtual ClientResult<BinaryData> GenerateSpeech(string text, GeneratedSpeechVoice voice, SpeechGenerationOptions options = null, CancellationToken cancellationToken = default);
@@ -1266,6 +1267,7 @@ public class ChatClient {
     public ChatClient(string model, ApiKeyCredential credential, OpenAIClientOptions options);
     public ChatClient(string model, ApiKeyCredential credential);
     public ChatClient(string model, string apiKey);
+    public string Model { get; }
     public ClientPipeline Pipeline { get; }
     public virtual ClientResult<ChatCompletion> CompleteChat(params ChatMessage[] messages);
     public virtual ClientResult CompleteChat(BinaryContent content, RequestOptions options = null);
@@ -1951,6 +1953,7 @@ public class EmbeddingClient {
     public EmbeddingClient(string model, ApiKeyCredential credential, OpenAIClientOptions options);
     public EmbeddingClient(string model, ApiKeyCredential credential);
     public EmbeddingClient(string model, string apiKey);
+    public string Model { get; }
     public ClientPipeline Pipeline { get; }
     public virtual ClientResult<OpenAIEmbedding> GenerateEmbedding(string input, EmbeddingGenerationOptions options = null, CancellationToken cancellationToken = default);
     public virtual Task<ClientResult<OpenAIEmbedding>> GenerateEmbeddingAsync(string input, EmbeddingGenerationOptions options = null, CancellationToken cancellationToken = default);
@@ -2811,6 +2814,7 @@ public class ImageClient {
     public ImageClient(string model, ApiKeyCredential credential, OpenAIClientOptions options);
     public ImageClient(string model, ApiKeyCredential credential);
     public ImageClient(string model, string apiKey);
+    public string Model { get; }
     public ClientPipeline Pipeline { get; }
     public virtual ClientResult<GeneratedImage> GenerateImage(string prompt, ImageGenerationOptions options = null, CancellationToken cancellationToken = default);
     public virtual Task<ClientResult<GeneratedImage>> GenerateImageAsync(string prompt, ImageGenerationOptions options = null, CancellationToken cancellationToken = default);
@@ -2966,6 +2970,7 @@ public class ModerationClient {
     public ModerationClient(string model, ApiKeyCredential credential, OpenAIClientOptions options);
     public ModerationClient(string model, ApiKeyCredential credential);
     public ModerationClient(string model, string apiKey);
+    public string Model { get; }
     public ClientPipeline Pipeline { get; }
     public virtual ClientResult ClassifyText(BinaryContent content, RequestOptions options = null);
     public virtual ClientResult<ModerationResultCollection> ClassifyText(IEnumerable<string> inputs, CancellationToken cancellationToken = default);
@@ -4052,6 +4057,13 @@ public class OpenAIResponseClient {
     public virtual CollectionResult<StreamingResponseUpdate> GetResponseStreaming(string responseId, int? startingAfter = null, CancellationToken cancellationToken = default);
     public virtual AsyncCollectionResult<StreamingResponseUpdate> GetResponseStreamingAsync(string responseId, int? startingAfter = null, CancellationToken cancellationToken = default);
 }
+public static class OpenAIResponsesModelFactory {
+    public static MessageResponseItem MessageResponseItem(string id = null, MessageRole role = MessageRole.Assistant, MessageStatus? status = null);
+    public static OpenAIResponse OpenAIResponse(string id = null, DateTimeOffset createdAt = default, ResponseStatus? status = null, ResponseError error = null, ResponseTokenUsage usage = null, string endUserId = null, ResponseReasoningOptions reasoningOptions = null, int? maxOutputTokenCount = null, ResponseTextOptions textOptions = null, ResponseTruncationMode? truncationMode = null, ResponseIncompleteStatusDetails incompleteStatusDetails = null, IEnumerable<ResponseItem> outputItems = null, bool parallelToolCallsEnabled = false, ResponseToolChoice toolChoice = null, string model = null, IDictionary<string, string> metadata = null, float? temperature = null, float? topP = null, string previousResponseId = null, bool? background = null, string instructions = null, IEnumerable<ResponseTool> tools = null);
+    public static ReasoningResponseItem ReasoningResponseItem(string id = null, string encryptedContent = null, ReasoningStatus? status = null, IEnumerable<ReasoningSummaryPart> summaryParts = null);
+    public static ReasoningResponseItem ReasoningResponseItem(string id = null, string encryptedContent = null, ReasoningStatus? status = null, string summaryText = null);
+    public static ReferenceResponseItem ReferenceResponseItem(string id = null);
+}
 public class ReasoningResponseItem : ResponseItem, IJsonModel<ReasoningResponseItem>, IPersistableModel<ReasoningResponseItem> {
     public ReasoningResponseItem(IEnumerable<ReasoningSummaryPart> summaryParts);
     public ReasoningResponseItem(string summaryText);
diff --git a/src/Custom/Responses/Items/ReasoningResponseItem.cs b/src/Custom/Responses/Items/ReasoningResponseItem.cs
index 662ee2b4a..a84d7f0d7 100644
--- a/src/Custom/Responses/Items/ReasoningResponseItem.cs
+++ b/src/Custom/Responses/Items/ReasoningResponseItem.cs
@@ -1,5 +1,4 @@
 using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
 using System.Linq;
 
 namespace OpenAI.Responses;
@@ -12,7 +11,7 @@ public partial class ReasoningResponseItem
 {
     // CUSTOM: Retain optionality of OpenAPI read-only property value
     [CodeGenMember("Status")]
-    public ReasoningStatus? Status { get; }
+    public ReasoningStatus? Status { get; internal set; }
 
     // CUSTOM: Rename for collection clarity
     [CodeGenMember("Summary")]
diff --git a/src/Custom/Responses/OpenAIResponsesModelFactory.cs b/src/Custom/Responses/OpenAIResponsesModelFactory.cs
new file mode 100644
index 000000000..42cdbd292
--- /dev/null
+++ b/src/Custom/Responses/OpenAIResponsesModelFactory.cs
@@ -0,0 +1,148 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
+using System.Linq;
+
+namespace OpenAI.Responses;
+
+/// <summary> Model factory for <see cref="OpenAIResponse"/> models. </summary>
+[Experimental("OPENAI001")]
+public static partial class OpenAIResponsesModelFactory
+{
+    /// <summary> Initializes a new instance of <see cref="OpenAIResponse"/>. </summary>
+    /// <returns> A new <see cref="OpenAIResponse"/> instance for mocking. </returns>
+    public static OpenAIResponse OpenAIResponse(
+        string id = null,
+        DateTimeOffset createdAt = default,
+        ResponseStatus? status = null,
+        ResponseError error = null,
+        ResponseTokenUsage usage = null,
+        string endUserId = null,
+        ResponseReasoningOptions reasoningOptions = null,
+        int? maxOutputTokenCount = null,
+        ResponseTextOptions textOptions = null,
+        ResponseTruncationMode? truncationMode = null,
+        ResponseIncompleteStatusDetails incompleteStatusDetails = null,
+        IEnumerable<ResponseItem> outputItems = null,
+        bool parallelToolCallsEnabled = default,
+        ResponseToolChoice toolChoice = null,
+        string model = null,
+        IDictionary<string, string> metadata = null,
+        float? temperature = null,
+        float? topP = null,
+        string previousResponseId = null,
+        bool? background = null,
+        string instructions = null,
+        IEnumerable<ResponseTool> tools = null)
+    {
+        outputItems ??= new List<ResponseItem>();
+        tools ??= new List<ResponseTool>();
+        metadata ??= new Dictionary<string, string>();
+
+        return new OpenAIResponse(
+            metadata: metadata,
+            temperature: temperature,
+            topP: topP,
+            serviceTier: null,
+            previousResponseId: previousResponseId,
+            background: background,
+            instructions: instructions,
+            tools: tools.ToList(),
+            id: id,
+            status: status,
+            createdAt: createdAt,
+            error: error,
+            usage: usage,
+            endUserId: endUserId,
+            reasoningOptions: reasoningOptions,
+            maxOutputTokenCount: maxOutputTokenCount,
+            textOptions: textOptions,
+            truncationMode: truncationMode,
+            incompleteStatusDetails: incompleteStatusDetails,
+            outputItems: outputItems.ToList(),
+            parallelToolCallsEnabled: parallelToolCallsEnabled,
+            toolChoice: toolChoice,
+            model: model,
+            @object: "response",
+            additionalBinaryDataProperties: null);
+    }
+
+    /// <summary> Initializes a new instance of <see cref="MessageResponseItem"/>. </summary>
+    /// <returns> A new <see cref="MessageResponseItem"/> instance for mocking. </returns>
+    public static MessageResponseItem MessageResponseItem(
+        string id = null,
+        MessageRole role = MessageRole.Assistant,
+        MessageStatus? status = null)
+    {
+        // Convert the public MessageRole to the internal role type
+        InternalResponsesMessageRole internalRole = role.ToSerialString();
+
+        return new MessageResponseItem(
+            id: id,
+            internalRole: internalRole,
+            status: status);
+    }
+
+    /// <summary> Initializes a new instance of <see cref="ReasoningResponseItem"/>. </summary>
+    /// <param name="id"> The ID of the reasoning response item. </param>
+    /// <param name="encryptedContent"> The encrypted reasoning content. </param>
+    /// <param name="status"> The status of the reasoning response item. </param>
+    /// <param name="summaryParts"> The collection of summary parts. </param>
+    /// <returns> A new <see cref="ReasoningResponseItem"/> instance for mocking. </returns>
+    public static ReasoningResponseItem ReasoningResponseItem(
+        string id = null,
+        string encryptedContent = null,
+        ReasoningStatus? status = null,
+        IEnumerable<ReasoningSummaryPart> summaryParts = null)
+    {
+        summaryParts ??= new List<ReasoningSummaryPart>();
+
+        var item = new ReasoningResponseItem(
+            kind: InternalItemType.Reasoning,
+            id: id,
+            additionalBinaryDataProperties: null,
+            encryptedContent: encryptedContent,
+            summaryParts: summaryParts.ToList());
+
+        item.Status = status;
+        return item;
+    }
+
+    /// <summary> Initializes a new instance of <see cref="ReasoningResponseItem"/> with summary text. </summary>
+    /// <param name="id"> The ID of the reasoning response item. </param>
+    /// <param name="encryptedContent"> The encrypted reasoning content. </param>
+    /// <param name="status"> The status of the reasoning response item. </param>
+    /// <param name="summaryText"> The summary text to create a ReasoningSummaryTextPart from. </param>
+    /// <returns> A new <see cref="ReasoningResponseItem"/> instance for mocking. </returns>
+    public static ReasoningResponseItem ReasoningResponseItem(
+        string id = null,
+        string encryptedContent = null,
+        ReasoningStatus? status = null,
+        string summaryText = null)
+    {
+        var summaryParts = !string.IsNullOrEmpty(summaryText)
+            ? new List<ReasoningSummaryPart> { new ReasoningSummaryTextPart(summaryText) }
+            : new List<ReasoningSummaryPart>();
+
+        var item = new ReasoningResponseItem(
+            kind: InternalItemType.Reasoning,
+            id: id,
+            additionalBinaryDataProperties: null,
+            encryptedContent: encryptedContent,
+            summaryParts: summaryParts);
+
+        item.Status = status;
+        return item;
+    }
+
+    /// <summary> Initializes a new instance of <see cref="ReferenceResponseItem"/>. </summary>
+    /// <returns> A new <see cref="ReferenceResponseItem"/> instance for mocking. </returns>
+    public static ReferenceResponseItem ReferenceResponseItem(
+        string id = null)
+    {
+        return new ReferenceResponseItem(
+            kind: InternalItemType.ItemReference,
+            id: id,
+            additionalBinaryDataProperties: null);
+    }
+}
\ No newline at end of file
diff --git a/tests/Responses/OpenAIResponsesModelFactoryTests.cs b/tests/Responses/OpenAIResponsesModelFactoryTests.cs
new file mode 100644
index 000000000..b1951358a
--- /dev/null
+++ b/tests/Responses/OpenAIResponsesModelFactoryTests.cs
@@ -0,0 +1,106 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using NUnit.Framework;
+using OpenAI.Responses;
+
+namespace OpenAI.Tests.Responses;
+
+[Parallelizable(ParallelScope.All)]
+[Category("Responses")]
+[Category("Smoke")]
+public partial class OpenAIResponsesModelFactoryTests
+{
+    [Test]
+    public void OpenAIResponseWorks()
+    {
+        string id = "response_123";
+        DateTimeOffset createdAt = DateTimeOffset.UtcNow;
+        ResponseStatus status = ResponseStatus.Completed;
+        string model = "gpt-4o";
+        IEnumerable<ResponseItem> outputItems = [
+            OpenAIResponsesModelFactory.MessageResponseItem(id: "msg_1", role: MessageRole.User, status: MessageStatus.Completed),
+            OpenAIResponsesModelFactory.ReasoningResponseItem(id: "reason_1", encryptedContent: "encrypted", status: ReasoningStatus.InProgress, summaryText: "summary")
+        ];
+
+        OpenAIResponse response = OpenAIResponsesModelFactory.OpenAIResponse(
+            id: id,
+            createdAt: createdAt,
+            status: status,
+            model: model,
+            outputItems: outputItems);
+
+        Assert.That(response.Id, Is.EqualTo(id));
+        Assert.That(response.CreatedAt, Is.EqualTo(createdAt));
+        Assert.That(response.Status, Is.EqualTo(status));
+        Assert.That(response.Model, Is.EqualTo(model));
+        Assert.That(response.OutputItems.SequenceEqual(outputItems), Is.True);
+    }
+
+    [Test]
+    public void MessageResponseItemWorks()
+    {
+        string id = "message_123";
+        MessageRole role = MessageRole.Developer;
+        MessageStatus status = MessageStatus.InProgress;
+
+        MessageResponseItem messageItem = OpenAIResponsesModelFactory.MessageResponseItem(
+            id: id,
+            role: role,
+            status: status);
+
+        Assert.That(messageItem.Id, Is.EqualTo(id));
+        Assert.That(messageItem.Role, Is.EqualTo(role));
+        Assert.That(messageItem.Status, Is.EqualTo(status));
+    }
+
+    [Test]
+    public void ReasoningResponseItemWorks()
+    {
+        string id = "reasoning_123";
+        string encryptedContent = "encrypted_reasoning_data";
+        ReasoningStatus status = ReasoningStatus.Completed;
+        var summaryParts = new List<ReasoningSummaryPart> { new ReasoningSummaryTextPart("test summary") };
+
+        ReasoningResponseItem reasoningItem = OpenAIResponsesModelFactory.ReasoningResponseItem(
+            id: id,
+            encryptedContent: encryptedContent,
+            status: status,
+            summaryParts: summaryParts);
+
+        Assert.That(reasoningItem.Id, Is.EqualTo(id));
+        Assert.That(reasoningItem.EncryptedContent, Is.EqualTo(encryptedContent));
+        Assert.That(reasoningItem.Status, Is.EqualTo(status));
+        Assert.That(reasoningItem.SummaryParts.SequenceEqual(summaryParts), Is.True);
+    }
+
+    [Test]
+    public void ReasoningResponseItemWithSummaryTextWorks()
+    {
+        string id = "reasoning_456";
+        string encryptedContent = "encrypted_data";
+        ReasoningStatus status = ReasoningStatus.InProgress;
+        string summaryText = "This is a reasoning summary";
+
+        ReasoningResponseItem reasoningItem = OpenAIResponsesModelFactory.ReasoningResponseItem(
+            id: id,
+            encryptedContent: encryptedContent,
+            status: status,
+            summaryText: summaryText);
+
+        Assert.That(reasoningItem.Id, Is.EqualTo(id));
+        Assert.That(reasoningItem.EncryptedContent, Is.EqualTo(encryptedContent));
+        Assert.That(reasoningItem.Status, Is.EqualTo(status));
+        Assert.That(reasoningItem.GetSummaryText(), Is.EqualTo(summaryText));
+    }
+
+    [Test]
+    public void ReferenceResponseItemWorks()
+    {
+        string id = "reference_123";
+
+        ReferenceResponseItem referenceItem = OpenAIResponsesModelFactory.ReferenceResponseItem(id: id);
+
+        Assert.That(referenceItem.Id, Is.EqualTo(id));
+    }
+}
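
Usage sketch (illustrative): a minimal example of the surface added above, namely the new Model property on the convenience clients and OpenAIResponsesModelFactory for building an OpenAIResponse in memory, for instance when stubbing results in unit tests. It assumes the library's OpenAI.Chat and OpenAI.Responses namespaces and C# 12 collection expressions; the API key and IDs are placeholders.

#pragma warning disable OPENAI001 // Responses types are marked [Experimental("OPENAI001")]

using System;
using OpenAI.Chat;
using OpenAI.Responses;

public static class ModelFactoryUsageSketch
{
    public static void Main()
    {
        // New Model property: reports the model name the client was constructed with.
        ChatClient chatClient = new ChatClient(model: "gpt-4o", apiKey: "<placeholder-api-key>");
        Console.WriteLine(chatClient.Model); // "gpt-4o"

        // New model factory: construct an OpenAIResponse without calling the service,
        // e.g. as a canned return value for a mocked OpenAIResponseClient.
        OpenAIResponse mocked = OpenAIResponsesModelFactory.OpenAIResponse(
            id: "response_123",
            createdAt: DateTimeOffset.UtcNow,
            status: ResponseStatus.Completed,
            model: "gpt-4o",
            outputItems:
            [
                OpenAIResponsesModelFactory.MessageResponseItem(id: "msg_1", role: MessageRole.Assistant, status: MessageStatus.Completed)
            ]);

        Console.WriteLine($"{mocked.Id}: {mocked.OutputItems.Count} output item(s)");
    }
}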