diff --git a/api/OpenAI.net8.0.cs b/api/OpenAI.net8.0.cs
index 6b8bc9a1b..aee0988c7 100644
--- a/api/OpenAI.net8.0.cs
+++ b/api/OpenAI.net8.0.cs
@@ -5499,7 +5499,20 @@ public class StreamingResponseRefusalDoneUpdate : StreamingResponseUpdate, IJson
         protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options);
     }
     [Experimental("OPENAI001")]
-    public class StreamingResponseTextAnnotationAddedUpdate {
+    public class StreamingResponseTextAnnotationAddedUpdate : StreamingResponseUpdate, IJsonModel<StreamingResponseTextAnnotationAddedUpdate>, IPersistableModel<StreamingResponseTextAnnotationAddedUpdate> {
+        public BinaryData Annotation { get; }
+        public int AnnotationIndex { get; }
+        public int ContentIndex { get; }
+        public string ItemId { get; }
+        public int OutputIndex { get; }
+        [Experimental("OPENAI001")]
+        protected override StreamingResponseUpdate JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options);
+        [Experimental("OPENAI001")]
+        protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options);
+        [Experimental("OPENAI001")]
+        protected override StreamingResponseUpdate PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options);
+        [Experimental("OPENAI001")]
+        protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options);
     }
     [Experimental("OPENAI001")]
     public class StreamingResponseUpdate : IJsonModel<StreamingResponseUpdate>, IPersistableModel<StreamingResponseUpdate> {
diff --git a/api/OpenAI.netstandard2.0.cs b/api/OpenAI.netstandard2.0.cs
index 8689cb397..e8b769613 100644
--- a/api/OpenAI.netstandard2.0.cs
+++ b/api/OpenAI.netstandard2.0.cs
@@ -4326,7 +4326,16 @@ public class StreamingResponseRefusalDoneUpdate : StreamingResponseUpdate, IJson
         protected override StreamingResponseUpdate PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options);
         protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options);
     }
-    public class StreamingResponseTextAnnotationAddedUpdate {
+    public class StreamingResponseTextAnnotationAddedUpdate : StreamingResponseUpdate, IJsonModel<StreamingResponseTextAnnotationAddedUpdate>, IPersistableModel<StreamingResponseTextAnnotationAddedUpdate> {
+        public BinaryData Annotation { get; }
+        public int AnnotationIndex { get; }
+        public int ContentIndex { get; }
+        public string ItemId { get; }
+        public int OutputIndex { get; }
+        protected override StreamingResponseUpdate JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options);
+        protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options);
+        protected override StreamingResponseUpdate PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options);
+        protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options);
     }
     public class StreamingResponseUpdate : IJsonModel<StreamingResponseUpdate>, IPersistableModel<StreamingResponseUpdate> {
         public int SequenceNumber { get; }
diff --git a/src/Custom/Responses/Streaming/StreamingResponseTextAnnotationAddedUpdate.cs b/src/Custom/Responses/Streaming/StreamingResponseTextAnnotationAddedUpdate.cs
index 662090315..0b7e27825 100644
--- a/src/Custom/Responses/Streaming/StreamingResponseTextAnnotationAddedUpdate.cs
+++ b/src/Custom/Responses/Streaming/StreamingResponseTextAnnotationAddedUpdate.cs
@@ -6,7 +6,7 @@ namespace OpenAI.Responses;
 // - Added Experimental attribute.
 // - Renamed.
 [Experimental("OPENAI001")]
-[CodeGenType("ResponseTextAnnotationDeltaEvent")]
+[CodeGenType("ResponseOutputTextAnnotationAddedEvent")]
 public partial class StreamingResponseTextAnnotationAddedUpdate
 {
 }
\ No newline at end of file
diff --git a/src/Generated/Models/StreamingResponseTextAnnotationAddedUpdate.Serialization.cs b/src/Generated/Models/StreamingResponseTextAnnotationAddedUpdate.Serialization.cs
new file mode 100644
index 000000000..ad92ea62a
--- /dev/null
+++ b/src/Generated/Models/StreamingResponseTextAnnotationAddedUpdate.Serialization.cs
@@ -0,0 +1,184 @@
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
+using System.Text.Json;
+using OpenAI;
+
+namespace OpenAI.Responses
+{
+    public partial class StreamingResponseTextAnnotationAddedUpdate : IJsonModel<StreamingResponseTextAnnotationAddedUpdate>
+    {
+        internal StreamingResponseTextAnnotationAddedUpdate() : this(InternalResponseStreamEventType.ResponseOutputTextAnnotationAdded, default, null, null, default, default, default, null)
+        {
+        }
+
+        void IJsonModel<StreamingResponseTextAnnotationAddedUpdate>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+        {
+            writer.WriteStartObject();
+            JsonModelWriteCore(writer, options);
+            writer.WriteEndObject();
+        }
+
+        [Experimental("OPENAI001")]
+        protected override void JsonModelWriteCore(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+        {
+            string format = options.Format == "W" ? ((IPersistableModel<StreamingResponseTextAnnotationAddedUpdate>)this).GetFormatFromOptions(options) : options.Format;
+            if (format != "J")
+            {
+                throw new FormatException($"The model {nameof(StreamingResponseTextAnnotationAddedUpdate)} does not support writing '{format}' format.");
+            }
+            base.JsonModelWriteCore(writer, options);
+            if (_additionalBinaryDataProperties?.ContainsKey("item_id") != true)
+            {
+                writer.WritePropertyName("item_id"u8);
+                writer.WriteStringValue(ItemId);
+            }
+            if (_additionalBinaryDataProperties?.ContainsKey("output_index") != true)
+            {
+                writer.WritePropertyName("output_index"u8);
+                writer.WriteNumberValue(OutputIndex);
+            }
+            if (_additionalBinaryDataProperties?.ContainsKey("content_index") != true)
+            {
+                writer.WritePropertyName("content_index"u8);
+                writer.WriteNumberValue(ContentIndex);
+            }
+            if (_additionalBinaryDataProperties?.ContainsKey("annotation_index") != true)
+            {
+                writer.WritePropertyName("annotation_index"u8);
+                writer.WriteNumberValue(AnnotationIndex);
+            }
+            if (_additionalBinaryDataProperties?.ContainsKey("annotation") != true)
+            {
+                writer.WritePropertyName("annotation"u8);
+#if NET6_0_OR_GREATER
+                writer.WriteRawValue(Annotation);
+#else
+                using (JsonDocument document = JsonDocument.Parse(Annotation))
+                {
+                    JsonSerializer.Serialize(writer, document.RootElement);
+                }
+#endif
+            }
+        }
+
+        StreamingResponseTextAnnotationAddedUpdate IJsonModel<StreamingResponseTextAnnotationAddedUpdate>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) => (StreamingResponseTextAnnotationAddedUpdate)JsonModelCreateCore(ref reader, options);
+
+        [Experimental("OPENAI001")]
+        protected override StreamingResponseUpdate JsonModelCreateCore(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+        {
+            string format = options.Format == "W" ? ((IPersistableModel<StreamingResponseTextAnnotationAddedUpdate>)this).GetFormatFromOptions(options) : options.Format;
+            if (format != "J")
+            {
+                throw new FormatException($"The model {nameof(StreamingResponseTextAnnotationAddedUpdate)} does not support reading '{format}' format.");
+            }
+            using JsonDocument document = JsonDocument.ParseValue(ref reader);
+            return DeserializeStreamingResponseTextAnnotationAddedUpdate(document.RootElement, options);
+        }
+
+        internal static StreamingResponseTextAnnotationAddedUpdate DeserializeStreamingResponseTextAnnotationAddedUpdate(JsonElement element, ModelReaderWriterOptions options)
+        {
+            if (element.ValueKind == JsonValueKind.Null)
+            {
+                return null;
+            }
+            InternalResponseStreamEventType kind = default;
+            int sequenceNumber = default;
+            IDictionary<string, BinaryData> additionalBinaryDataProperties = new ChangeTrackingDictionary<string, BinaryData>();
+            string itemId = default;
+            int outputIndex = default;
+            int contentIndex = default;
+            int annotationIndex = default;
+            BinaryData annotation = default;
+            foreach (var prop in element.EnumerateObject())
+            {
+                if (prop.NameEquals("type"u8))
+                {
+                    kind = new InternalResponseStreamEventType(prop.Value.GetString());
+                    continue;
+                }
+                if (prop.NameEquals("sequence_number"u8))
+                {
+                    sequenceNumber = prop.Value.GetInt32();
+                    continue;
+                }
+                if (prop.NameEquals("item_id"u8))
+                {
+                    itemId = prop.Value.GetString();
+                    continue;
+                }
+                if (prop.NameEquals("output_index"u8))
+                {
+                    outputIndex = prop.Value.GetInt32();
+                    continue;
+                }
+                if (prop.NameEquals("content_index"u8))
+                {
+                    contentIndex = prop.Value.GetInt32();
+                    continue;
+                }
+                if (prop.NameEquals("annotation_index"u8))
+                {
+                    annotationIndex = prop.Value.GetInt32();
+                    continue;
+                }
+                if (prop.NameEquals("annotation"u8))
+                {
+                    annotation = BinaryData.FromString(prop.Value.GetRawText());
+                    continue;
+                }
+                // Plugin customization: remove options.Format != "W" check
+                additionalBinaryDataProperties.Add(prop.Name, BinaryData.FromString(prop.Value.GetRawText()));
+            }
+            return new StreamingResponseTextAnnotationAddedUpdate(
+                kind,
+                sequenceNumber,
+                additionalBinaryDataProperties,
+                itemId,
+                outputIndex,
+                contentIndex,
+                annotationIndex,
+                annotation);
+        }
+
+        BinaryData IPersistableModel<StreamingResponseTextAnnotationAddedUpdate>.Write(ModelReaderWriterOptions options) => PersistableModelWriteCore(options);
+
+        [Experimental("OPENAI001")]
+        protected override BinaryData PersistableModelWriteCore(ModelReaderWriterOptions options)
+        {
+            string format = options.Format == "W" ? ((IPersistableModel<StreamingResponseTextAnnotationAddedUpdate>)this).GetFormatFromOptions(options) : options.Format;
+            switch (format)
+            {
+                case "J":
+                    return ModelReaderWriter.Write(this, options, OpenAIContext.Default);
+                default:
+                    throw new FormatException($"The model {nameof(StreamingResponseTextAnnotationAddedUpdate)} does not support writing '{options.Format}' format.");
+            }
+        }
+
+        StreamingResponseTextAnnotationAddedUpdate IPersistableModel<StreamingResponseTextAnnotationAddedUpdate>.Create(BinaryData data, ModelReaderWriterOptions options) => (StreamingResponseTextAnnotationAddedUpdate)PersistableModelCreateCore(data, options);
+
+        [Experimental("OPENAI001")]
+        protected override StreamingResponseUpdate PersistableModelCreateCore(BinaryData data, ModelReaderWriterOptions options)
+        {
+            string format = options.Format == "W" ? ((IPersistableModel<StreamingResponseTextAnnotationAddedUpdate>)this).GetFormatFromOptions(options) : options.Format;
+            switch (format)
+            {
+                case "J":
+                    using (JsonDocument document = JsonDocument.Parse(data))
+                    {
+                        return DeserializeStreamingResponseTextAnnotationAddedUpdate(document.RootElement, options);
+                    }
+                default:
+                    throw new FormatException($"The model {nameof(StreamingResponseTextAnnotationAddedUpdate)} does not support reading '{options.Format}' format.");
+            }
+        }
+
+        string IPersistableModel<StreamingResponseTextAnnotationAddedUpdate>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+    }
+}
diff --git a/src/Generated/Models/StreamingResponseTextAnnotationAddedUpdate.cs b/src/Generated/Models/StreamingResponseTextAnnotationAddedUpdate.cs
new file mode 100644
index 000000000..2847c735d
--- /dev/null
+++ b/src/Generated/Models/StreamingResponseTextAnnotationAddedUpdate.cs
@@ -0,0 +1,40 @@
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace OpenAI.Responses
+{
+    public partial class StreamingResponseTextAnnotationAddedUpdate : StreamingResponseUpdate
+    {
+        internal StreamingResponseTextAnnotationAddedUpdate(int sequenceNumber, string itemId, int outputIndex, int contentIndex, int annotationIndex, BinaryData annotation) : base(InternalResponseStreamEventType.ResponseOutputTextAnnotationAdded, sequenceNumber)
+        {
+            ItemId = itemId;
+            OutputIndex = outputIndex;
+            ContentIndex = contentIndex;
+            AnnotationIndex = annotationIndex;
+            Annotation = annotation;
+        }
+
+        internal StreamingResponseTextAnnotationAddedUpdate(InternalResponseStreamEventType kind, int sequenceNumber, IDictionary<string, BinaryData> additionalBinaryDataProperties, string itemId, int outputIndex, int contentIndex, int annotationIndex, BinaryData annotation) : base(kind, sequenceNumber, additionalBinaryDataProperties)
+        {
+            ItemId = itemId;
+            OutputIndex = outputIndex;
+            ContentIndex = contentIndex;
+            AnnotationIndex = annotationIndex;
+            Annotation = annotation;
+        }
+
+        public string ItemId { get; }
+
+        public int OutputIndex { get; }
+
+        public int ContentIndex { get; }
+
+        public int AnnotationIndex { get; }
+
+        public BinaryData Annotation { get; }
+    }
+}
diff --git a/src/Generated/Models/StreamingResponseUpdate.Serialization.cs b/src/Generated/Models/StreamingResponseUpdate.Serialization.cs
index 8e3be952f..26b2db5f2 100644
--- a/src/Generated/Models/StreamingResponseUpdate.Serialization.cs
+++ b/src/Generated/Models/StreamingResponseUpdate.Serialization.cs
@@ -164,6 +164,8 @@ internal static StreamingResponseUpdate DeserializeStreamingResponseUpdate(JsonE
                     return InternalResponseMCPListToolsFailedEvent.DeserializeInternalResponseMCPListToolsFailedEvent(element, options);
                 case "response.mcp_list_tools.in_progress":
                     return InternalResponseMCPListToolsInProgressEvent.DeserializeInternalResponseMCPListToolsInProgressEvent(element, options);
+                case "response.output_text.annotation.added":
+                    return StreamingResponseTextAnnotationAddedUpdate.DeserializeStreamingResponseTextAnnotationAddedUpdate(element, options);
                 case "response.queued":
                     return StreamingResponseQueuedUpdate.DeserializeStreamingResponseQueuedUpdate(element, options);
                 case "response.reasoning.delta":
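
Not part of the diff above: a minimal consumption sketch showing where the new update type surfaces in a streaming loop. The property names on `StreamingResponseTextAnnotationAddedUpdate` come from the generated model in this change; the client construction, prompt text, and `StreamingResponseOutputTextDeltaUpdate` handling are illustrative uses of the existing Responses streaming surface, so treat the exact signatures as assumptions.

```csharp
#pragma warning disable OPENAI001 // Responses streaming types are [Experimental("OPENAI001")]

using System;
using OpenAI.Responses;

// Illustrative client setup; model name and prompt are placeholders.
OpenAIResponseClient client = new(
    model: "gpt-4.1",
    apiKey: Environment.GetEnvironmentVariable("OPENAI_API_KEY"));

await foreach (StreamingResponseUpdate update in client.CreateResponseStreamingAsync(
    "Summarize the attached document and cite your sources."))
{
    if (update is StreamingResponseOutputTextDeltaUpdate delta)
    {
        Console.Write(delta.Delta);
    }
    else if (update is StreamingResponseTextAnnotationAddedUpdate annotationAdded)
    {
        // Annotation is surfaced as raw JSON (BinaryData) rather than a typed model,
        // positioned by item id plus output/content/annotation indexes.
        Console.WriteLine();
        Console.WriteLine(
            $"annotation #{annotationAdded.AnnotationIndex} " +
            $"(item {annotationAdded.ItemId}, output {annotationAdded.OutputIndex}, " +
            $"content {annotationAdded.ContentIndex}): {annotationAdded.Annotation}");
    }
}
```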