diff --git a/async-openai/README.md b/async-openai/README.md index 28271272..56e69d5c 100644 --- a/async-openai/README.md +++ b/async-openai/README.md @@ -28,6 +28,7 @@ - [x] Batch - [x] Chat - [x] Completions (Legacy) + - [x] Conversations - [x] Embeddings - [x] Files - [x] Fine-Tuning diff --git a/async-openai/src/client.rs b/async-openai/src/client.rs index 1af00e1b..bed0be9a 100644 --- a/async-openai/src/client.rs +++ b/async-openai/src/client.rs @@ -13,8 +13,9 @@ use crate::{ image::Images, moderation::Moderations, traits::AsyncTryFrom, - Assistants, Audio, AuditLogs, Batches, Chat, Completions, Embeddings, FineTuning, Invites, - Models, Projects, Responses, Threads, Uploads, Users, VectorStores, Videos, + Assistants, Audio, AuditLogs, Batches, Chat, Completions, Conversations, Embeddings, + FineTuning, Invites, Models, Projects, Responses, Threads, Uploads, Users, VectorStores, + Videos, }; #[derive(Debug, Clone, Default)] @@ -172,6 +173,11 @@ impl Client { Responses::new(self) } + /// To call [Conversations] group related APIs using this client. 
+    pub fn conversations(&self) -> Conversations<'_, C> {
+        Conversations::new(self)
+    }
+
     pub fn config(&self) -> &C {
         &self.config
     }
diff --git a/async-openai/src/config.rs b/async-openai/src/config.rs
index 82ab043c..a55e007c 100644
--- a/async-openai/src/config.rs
+++ b/async-openai/src/config.rs
@@ -136,7 +136,7 @@ impl Config for OpenAIConfig {
 
         // hack for Assistants APIs
         // Calls to the Assistants API require that you pass a Beta header
-        headers.insert(OPENAI_BETA_HEADER, "assistants=v2".parse().unwrap());
+        headers.insert(OPENAI_BETA_HEADER, "assistants=v2".parse().unwrap());
 
         headers
     }
diff --git a/async-openai/src/conversation_items.rs b/async-openai/src/conversation_items.rs
new file mode 100644
index 00000000..f58cd162
--- /dev/null
+++ b/async-openai/src/conversation_items.rs
@@ -0,0 +1,76 @@
+use serde::Serialize;
+
+use crate::{
+    config::Config,
+    error::OpenAIError,
+    types::responses::{
+        ConversationItem, ConversationItemList, ConversationResource,
+        CreateConversationItemsRequest,
+    },
+    Client,
+};
+
+/// Conversation items represent items within a conversation.
+pub struct ConversationItems<'c, C: Config> {
+    client: &'c Client<C>,
+    pub conversation_id: String,
+}
+
+impl<'c, C: Config> ConversationItems<'c, C> {
+    pub fn new(client: &'c Client<C>, conversation_id: &str) -> Self {
+        Self {
+            client,
+            conversation_id: conversation_id.into(),
+        }
+    }
+
+    /// Create items in a conversation.
+    #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)]
+    pub async fn create(
+        &self,
+        request: CreateConversationItemsRequest,
+    ) -> Result<ConversationItemList, OpenAIError> {
+        self.client
+            .post(
+                &format!("/conversations/{}/items", &self.conversation_id),
+                request,
+            )
+            .await
+    }
+
+    /// List all items for a conversation.
+    #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)]
+    pub async fn list<Q>(&self, query: &Q) -> Result<ConversationItemList, OpenAIError>
+    where
+        Q: Serialize + ?Sized,
+    {
+        self.client
+            .get_with_query(
+                &format!("/conversations/{}/items", &self.conversation_id),
+                &query,
+            )
+            .await
+    }
+
+    /// Retrieve an item from a conversation.
+    #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)]
+    pub async fn retrieve(&self, item_id: &str) -> Result<ConversationItem, OpenAIError> {
+        self.client
+            .get(&format!(
+                "/conversations/{}/items/{item_id}",
+                &self.conversation_id
+            ))
+            .await
+    }
+
+    /// Delete an item from a conversation.
+    #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)]
+    pub async fn delete(&self, item_id: &str) -> Result<ConversationResource, OpenAIError> {
+        self.client
+            .delete(&format!(
+                "/conversations/{}/items/{item_id}",
+                &self.conversation_id
+            ))
+            .await
+    }
+}
diff --git a/async-openai/src/conversations.rs b/async-openai/src/conversations.rs
new file mode 100644
index 00000000..41c0cee8
--- /dev/null
+++ b/async-openai/src/conversations.rs
@@ -0,0 +1,68 @@
+use crate::{
+    config::Config,
+    conversation_items::ConversationItems,
+    error::OpenAIError,
+    types::responses::{
+        ConversationResource, CreateConversationRequest, DeleteConversationResponse,
+        UpdateConversationRequest,
+    },
+    Client,
+};
+
+pub struct Conversations<'c, C: Config> {
+    client: &'c Client<C>,
+}
+
+impl<'c, C: Config> Conversations<'c, C> {
+    pub fn new(client: &'c Client<C>) -> Self {
+        Self { client }
+    }
+
+    /// [ConversationItems] API group
+    pub fn items(&self, conversation_id: &str) -> ConversationItems<'_, C> {
+        ConversationItems::new(self.client, conversation_id)
+    }
+
+    /// Create a conversation.
+    #[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)]
+    pub async fn create(
+        &self,
+        request: CreateConversationRequest,
+    ) -> Result<ConversationResource, OpenAIError> {
+        self.client.post("/conversations", request).await
+    }
+
+    /// Retrieves a conversation.
+    #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)]
+    pub async fn retrieve(
+        &self,
+        conversation_id: &str,
+    ) -> Result<ConversationResource, OpenAIError> {
+        self.client
+            .get(&format!("/conversations/{conversation_id}"))
+            .await
+    }
+
+    /// Delete a conversation. Items in the conversation will not be deleted.
+    #[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)]
+    pub async fn delete(
+        &self,
+        conversation_id: &str,
+    ) -> Result<DeleteConversationResponse, OpenAIError> {
+        self.client
+            .delete(&format!("/conversations/{conversation_id}"))
+            .await
+    }
+
+    /// Update a conversation.
+    #[crate::byot(T0 = std::fmt::Display, T1 = serde::Serialize, R = serde::de::DeserializeOwned)]
+    pub async fn update(
+        &self,
+        conversation_id: &str,
+        request: UpdateConversationRequest,
+    ) -> Result<ConversationResource, OpenAIError> {
+        self.client
+            .post(&format!("/conversations/{conversation_id}"), request)
+            .await
+    }
+}
diff --git a/async-openai/src/lib.rs b/async-openai/src/lib.rs
index 3d1b8360..e8e81311 100644
--- a/async-openai/src/lib.rs
+++ b/async-openai/src/lib.rs
@@ -148,6 +148,8 @@ mod chat;
 mod client;
 mod completion;
 pub mod config;
+mod conversation_items;
+mod conversations;
 mod download;
 mod embedding;
 pub mod error;
@@ -183,6 +185,8 @@ pub use batches::Batches;
 pub use chat::Chat;
 pub use client::Client;
 pub use completion::Completions;
+pub use conversation_items::ConversationItems;
+pub use conversations::Conversations;
 pub use embedding::Embeddings;
 pub use file::Files;
 pub use fine_tuning::FineTuning;
diff --git a/async-openai/src/types/common.rs b/async-openai/src/types/common.rs
index 1fc9017d..250e0443 100644
--- a/async-openai/src/types/common.rs
+++ b/async-openai/src/types/common.rs
@@ -1,4 +1,4 @@
-use std::path::PathBuf;
+use std::{collections::HashMap, path::PathBuf};
 
 use bytes::Bytes;
 use serde::{Deserialize, Serialize};
@@ -16,3 +16,19 @@ pub enum OrganizationRole {
     Owner,
     Reader,
 }
+
+/// Set of 16 key-value pairs that can be attached to an object.
+/// This can be useful for storing additional information about the
+/// object in a structured format, and querying for objects via API
+/// or the dashboard. Keys are strings with a maximum length of 64
+/// characters. Values are strings with a maximum length of 512
+/// characters.
+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Default)]
+#[serde(transparent)]
+pub struct Metadata(HashMap<String, String>);
+
+impl From<HashMap<String, String>> for Metadata {
+    fn from(value: HashMap<String, String>) -> Self {
+        Self(value)
+    }
+}
diff --git a/async-openai/src/types/responses/conversation.rs b/async-openai/src/types/responses/conversation.rs
new file mode 100644
index 00000000..3a880166
--- /dev/null
+++ b/async-openai/src/types/responses/conversation.rs
@@ -0,0 +1,249 @@
+use derive_builder::Builder;
+use serde::{Deserialize, Serialize};
+
+use crate::{
+    error::OpenAIError,
+    types::responses::{
+        AnyItemReference, CodeInterpreterToolCall, ComputerToolCall, CustomToolCall,
+        CustomToolCallOutput, FileSearchToolCall, ImageGenToolCall, InputFileContent,
+        InputImageContent, InputItem, InputTextContent, LocalShellToolCall,
+        LocalShellToolCallOutput, MCPApprovalRequest, MCPApprovalResponse, MCPListTools,
+        MCPToolCall, OutputTextContent, ReasoningItem, ReasoningTextContent, RefusalContent,
+        WebSearchToolCall,
+    },
+};
+
+use crate::types::common::Metadata;
+
+/// Represents a conversation object.
+#[derive(Clone, Serialize, Debug, Deserialize, PartialEq)]
+pub struct ConversationResource {
+    /// The unique ID of the conversation.
+    pub id: String,
+    /// The object type, which is always `conversation`.
+    pub object: String,
+    /// Set of 16 key-value pairs that can be attached to an object.
+    pub metadata: Metadata,
+    /// The time at which the conversation was created, measured in seconds since the Unix epoch.
+    pub created_at: i64,
+}
+
+/// Request to create a conversation.
+/// openapi spec type: CreateConversationBody
+#[derive(Clone, Serialize, Default, Debug, Deserialize, Builder, PartialEq)]
+#[builder(name = "CreateConversationRequestArgs")]
+#[builder(pattern = "mutable")]
+#[builder(setter(into, strip_option), default)]
+#[builder(derive(Debug))]
+#[builder(build_fn(error = "OpenAIError"))]
+pub struct CreateConversationRequest {
+    /// Set of 16 key-value pairs that can be attached to an object.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub metadata: Option<Metadata>,
+
+    /// Initial items to include in the conversation context. You may add up to 20 items at a time.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub items: Option<Vec<InputItem>>,
+}
+
+/// Request to update a conversation.
+#[derive(Clone, Serialize, Default, Debug, Deserialize, Builder, PartialEq)]
+#[builder(name = "UpdateConversationRequestArgs")]
+#[builder(pattern = "mutable")]
+#[builder(setter(into, strip_option), default)]
+#[builder(derive(Debug))]
+#[builder(build_fn(error = "OpenAIError"))]
+pub struct UpdateConversationRequest {
+    /// Set of 16 key-value pairs that can be attached to an object.
+    pub metadata: Metadata,
+}
+
+/// Represents a deleted conversation.
+#[derive(Clone, Serialize, Debug, Deserialize, PartialEq)]
+pub struct DeleteConversationResponse {
+    /// The unique ID of the deleted conversation.
+    pub id: String,
+    /// The object type, which is always `conversation.deleted`.
+    pub object: String,
+    /// Whether the conversation was successfully deleted.
+    pub deleted: bool,
+}
+
+/// Request to create conversation items.
+#[derive(Clone, Serialize, Default, Debug, Deserialize, Builder, PartialEq)]
+#[builder(name = "CreateConversationItemsRequestArgs")]
+#[builder(pattern = "mutable")]
+#[builder(setter(into, strip_option), default)]
+#[builder(derive(Debug))]
+#[builder(build_fn(error = "OpenAIError"))]
+pub struct CreateConversationItemsRequest {
+    /// The items to add to the conversation. You may add up to 20 items at a time.
+    pub items: Vec<InputItem>,
+}
+
+/// A list of Conversation items.
+#[derive(Clone, Serialize, Debug, Deserialize, PartialEq)]
+pub struct ConversationItemList {
+    /// The type of object returned, must be `list`.
+    pub object: String,
+    /// A list of conversation items.
+    pub data: Vec<ConversationItem>,
+    /// Whether there are more items available.
+    pub has_more: bool,
+    /// The ID of the first item in the list.
+    pub first_id: String,
+    /// The ID of the last item in the list.
+    pub last_id: String,
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
+#[serde(rename_all = "snake_case")]
+pub enum MessageStatus {
+    InProgress,
+    Incomplete,
+    Completed,
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
+#[serde(rename_all = "snake_case")]
+pub enum MessageRole {
+    Unknown,
+    User,
+    Assistant,
+    System,
+    Critic,
+    Discriminator,
+    Developer,
+    Tool,
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
+pub struct TextContent {
+    pub text: String,
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
+pub struct SummaryTextContent {
+    /// A summary of the reasoning output from the model so far.
+    pub text: String,
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
+pub struct ComputerScreenContent {
+    /// The URL of the screenshot image.
+    pub image_url: Option<String>,
+    /// The identifier of an uploaded file that contains the screenshot.
+    pub file_id: Option<String>,
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
+#[serde(tag = "type", rename_all = "snake_case")]
+pub enum MessageContent {
+    InputText(InputTextContent),
+    OutputText(OutputTextContent),
+    Text(TextContent),
+    SummaryText(SummaryTextContent),
+    ReasoningText(ReasoningTextContent),
+    Refusal(RefusalContent),
+    InputImage(InputImageContent),
+    ComputerScreen(ComputerScreenContent),
+    InputFile(InputFileContent),
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
+pub struct Message {
+    /// The unique ID of the message.
+    pub id: String,
+    /// The status of the item. One of `in_progress`, `completed`, or `incomplete`. Populated when items are
+    /// returned via API.
+    pub status: MessageStatus,
+    /// The role of the message. One of `unknown`, `user`, `assistant`, `system`, `critic`,
+    /// `discriminator`, `developer`, or `tool`.
+    pub role: MessageRole,
+    /// The content of the message.
+    pub content: Vec<MessageContent>,
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
+#[serde(tag = "type", rename_all = "snake_case")]
+pub enum ConversationItem {
+    Message(Message),
+    FileSearchCall(FileSearchToolCall),
+    WebSearchCall(WebSearchToolCall),
+    ImageGenerationCall(ImageGenToolCall),
+    ComputerCall(ComputerToolCall),
+    Reasoning(ReasoningItem),
+    CodeInterpreterCall(CodeInterpreterToolCall),
+    LocalShellCall(LocalShellToolCall),
+    LocalShellCallOutput(LocalShellToolCallOutput),
+    McpListTools(MCPListTools),
+    McpApprovalRequest(MCPApprovalRequest),
+    McpApprovalResponse(MCPApprovalResponse),
+    McpCall(MCPToolCall),
+    CustomToolCall(CustomToolCall),
+    CustomToolCallOutput(CustomToolCallOutput),
+    #[serde(untagged)]
+    ItemReference(AnyItemReference),
+}
+
+/// Additional fields to include in the response.
+#[derive(Clone, Serialize, Debug, Deserialize, PartialEq)]
+#[serde(rename_all = "snake_case")]
+pub enum IncludeParam {
+    /// Include the sources of the web search tool call.
+    #[serde(rename = "web_search_call.action.sources")]
+    WebSearchCallActionSources,
+    /// Include the outputs of python code execution in code interpreter tool call items.
+    #[serde(rename = "code_interpreter_call.outputs")]
+    CodeInterpreterCallOutputs,
+    /// Include image urls from the computer call output.
+    #[serde(rename = "computer_call_output.output.image_url")]
+    ComputerCallOutputOutputImageUrl,
+    /// Include the search results of the file search tool call.
+    #[serde(rename = "file_search_call.results")]
+    FileSearchCallResults,
+    /// Include image urls from the input message.
+    #[serde(rename = "message.input_image.image_url")]
+    MessageInputImageImageUrl,
+    /// Include logprobs with assistant messages.
+    #[serde(rename = "message.output_text.logprobs")]
+    MessageOutputTextLogprobs,
+    /// Include an encrypted version of reasoning tokens in reasoning item outputs.
+    #[serde(rename = "reasoning.encrypted_content")]
+    ReasoningEncryptedContent,
+}
+
+/// Query parameters for listing conversation items.
+#[derive(Clone, Serialize, Default, Debug, Deserialize, Builder, PartialEq)]
+#[builder(name = "ListConversationItemsQueryArgs")]
+#[builder(pattern = "mutable")]
+#[builder(setter(into, strip_option), default)]
+#[builder(derive(Debug))]
+#[builder(build_fn(error = "OpenAIError"))]
+pub struct ListConversationItemsQuery {
+    /// A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub limit: Option<u32>,
+
+    /// The order to return the input items in. Default is `desc`.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub order: Option<ListOrder>,
+
+    /// An item ID to list items after, used in pagination.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub after: Option<String>,
+
+    /// Specify additional output data to include in the model response.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub include: Option<Vec<IncludeParam>>,
+}
+
+/// The order to return items in.
+#[derive(Clone, Serialize, Debug, Deserialize, PartialEq)]
+#[serde(rename_all = "lowercase")]
+pub enum ListOrder {
+    /// Return items in ascending order.
+    Asc,
+    /// Return items in descending order.
+    Desc,
+}
diff --git a/async-openai/src/types/responses/mod.rs b/async-openai/src/types/responses/mod.rs
index 8d2635c2..546ba303 100644
--- a/async-openai/src/types/responses/mod.rs
+++ b/async-openai/src/types/responses/mod.rs
@@ -1,5 +1,7 @@
+mod conversation;
 mod response;
 mod response_stream;
 
+pub use conversation::*;
 pub use response::*;
 pub use response_stream::*;
diff --git a/async-openai/src/types/responses/response.rs b/async-openai/src/types/responses/response.rs
index 43e3e84e..c571440c 100644
--- a/async-openai/src/types/responses/response.rs
+++ b/async-openai/src/types/responses/response.rs
@@ -2422,7 +2422,7 @@ pub struct DeleteResponse {
 }
 
 #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
-pub struct InputItemReference {
+pub struct AnyItemReference {
     pub r#type: Option<String>,
     pub id: String,
 }
@@ -2450,7 +2450,7 @@ pub enum ItemResourceItem {
 
 #[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
 #[serde(untagged)]
 pub enum ItemResource {
-    ItemReference(InputItemReference),
+    ItemReference(AnyItemReference),
     Item(ItemResourceItem),
 }
diff --git a/examples/conversations/Cargo.toml b/examples/conversations/Cargo.toml
new file mode 100644
index 00000000..36a4799e
--- /dev/null
+++ b/examples/conversations/Cargo.toml
@@ -0,0 +1,11 @@
+[package]
+name = "conversations"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+[dependencies]
+async-openai = { path = "../../async-openai" }
+tokio = { version = "1.41.1", features = ["full"] }
+serde_json = "1"
+
diff --git a/examples/conversations/src/main.rs b/examples/conversations/src/main.rs
new file mode 100644
index 00000000..0fad7452
--- /dev/null
+++ b/examples/conversations/src/main.rs
@@ -0,0 +1,179 @@
+use async_openai::{
+    types::responses::{
+        ConversationItem, CreateConversationItemsRequestArgs, CreateConversationRequestArgs,
+        EasyInputContent, EasyInputMessage, InputItem, ListConversationItemsQuery, MessageType,
+        Role, UpdateConversationRequestArgs,
+    },
+    Client,
+};
+use
std::collections::HashMap; + +#[tokio::main] +async fn main() -> Result<(), Box> { + // Create client + let client = Client::new(); + + println!("=== Conversations API Example ===\n"); + + // 1. Create a conversation with initial items + println!("1. Creating a conversation with initial messages..."); + let conversation = client + .conversations() + .create( + CreateConversationRequestArgs::default() + .metadata({ + let mut metadata = HashMap::new(); + metadata.insert("topic".to_string(), "demo".to_string()); + metadata + }) + .items(vec![InputItem::from_easy_message(EasyInputMessage { + r#type: MessageType::Message, + role: Role::User, + content: EasyInputContent::Text( + "Hello! Can you help me understand conversations?".to_string(), + ), + })]) + .build()?, + ) + .await?; + + println!("Created conversation: {}", conversation.id); + println!("Created at: {}", conversation.created_at); + println!("Metadata: {:?}\n", conversation.metadata); + + // 2. Add more items to the conversation + println!("2. Adding more items to the conversation..."); + let items_list = client + .conversations() + .items(&conversation.id) + .create( + CreateConversationItemsRequestArgs::default() + .items(vec![ + InputItem::from_easy_message(EasyInputMessage { + r#type: MessageType::Message, + role: Role::User, + content: EasyInputContent::Text("What are the main features?".to_string()), + }), + InputItem::from_easy_message(EasyInputMessage { + r#type: MessageType::Message, + role: Role::User, + content: EasyInputContent::Text("Can you give me an example?".to_string()), + }), + ]) + .build()?, + ) + .await?; + + println!("Added {} items", items_list.data.len()); + println!("First item ID: {}", items_list.first_id); + println!("Last item ID: {}\n", items_list.last_id); + + // 3. List all items in the conversation + println!("3. 
Listing conversation items..."); + let query = ListConversationItemsQuery { + limit: Some(10), + order: None, + after: None, + include: None, + }; + let all_items = client + .conversations() + .items(&conversation.id) + .list(&query) + .await?; + + println!("Total items retrieved: {}", all_items.data.len()); + println!("Has more: {}", all_items.has_more); + for (i, item) in all_items.data.iter().enumerate() { + println!(" Item {}: {:?}", i + 1, item); + } + println!(); + + // 4. Retrieve a specific item + if !all_items.data.is_empty() { + println!("4. Retrieving a specific item..."); + let first_item_id = &all_items.first_id; + let item = client + .conversations() + .items(&conversation.id) + .retrieve(first_item_id) + .await?; + println!("Retrieved item: {:?}\n", item); + } + + // 5. Update conversation metadata + println!("5. Updating conversation metadata..."); + let updated_conversation = client + .conversations() + .update( + &conversation.id, + UpdateConversationRequestArgs::default() + .metadata({ + let mut metadata = HashMap::new(); + metadata.insert("topic".to_string(), "updated-demo".into()); + metadata.insert("status".to_string(), "active".into()); + metadata + }) + .build()?, + ) + .await?; + + println!("Updated metadata: {:?}\n", updated_conversation.metadata); + + // 6. Retrieve the conversation + println!("6. Retrieving the conversation..."); + let retrieved_conversation = client.conversations().retrieve(&conversation.id).await?; + println!("Retrieved conversation: {}", retrieved_conversation.id); + println!("Metadata: {:?}\n", retrieved_conversation.metadata); + + // 7. Delete the conversation items. + println!("7. 
Deleting the conversation items..."); + for item in all_items.data { + let item_id = match item { + ConversationItem::Message(message) => message.id, + ConversationItem::FileSearchCall(file_search_tool_call) => file_search_tool_call.id, + ConversationItem::WebSearchCall(web_search_tool_call) => web_search_tool_call.id, + ConversationItem::ImageGenerationCall(image_gen_tool_call) => image_gen_tool_call.id, + ConversationItem::ComputerCall(computer_tool_call) => computer_tool_call.id, + ConversationItem::Reasoning(reasoning_item) => reasoning_item.id, + ConversationItem::CodeInterpreterCall(code_interpreter_tool_call) => { + code_interpreter_tool_call.id + } + ConversationItem::LocalShellCall(local_shell_tool_call) => local_shell_tool_call.id, + ConversationItem::LocalShellCallOutput(local_shell_tool_call_output) => { + local_shell_tool_call_output.id + } + ConversationItem::McpListTools(mcplist_tools) => mcplist_tools.id, + ConversationItem::McpApprovalRequest(mcpapproval_request) => mcpapproval_request.id, + ConversationItem::McpApprovalResponse(mcpapproval_response) => { + mcpapproval_response.id.unwrap() + } + ConversationItem::McpCall(mcptool_call) => mcptool_call.id, + ConversationItem::CustomToolCall(custom_tool_call) => custom_tool_call.id, + ConversationItem::CustomToolCallOutput(custom_tool_call_output) => { + custom_tool_call_output.id.unwrap() + } + ConversationItem::ItemReference(any_item_reference) => any_item_reference.id, + }; + + let conversation_resource = client + .conversations() + .items(&conversation.id) + .delete(&item_id) + .await?; + println!( + "Item deleted: item id: {item_id}, conversation id: {}", + conversation_resource.id + ); + } + + // 8. Delete the conversation + println!("8. Deleting the conversation..."); + let deleted = client.conversations().delete(&conversation.id).await?; + println!("Conversation deleted: {}", deleted.deleted); + println!("Deleted ID: {}\n", deleted.id); + + println!("=== Example completed successfully! 
==="); + + Ok(()) +}