Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 23 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -362,6 +362,29 @@ print(result.choices.first?.message.content ?? "")

## Structured Outputs -->

### Provider-specific fields

When calling OpenAI-compatible gateways that expect additional JSON fields, supply them through the optional `vendorParameters` argument on chat APIs. The SDK merges these values into the top-level payload without disturbing the documented schema.

For example, Volcengine's Deepseek models expose a `thinking` object that controls deep-thinking behaviour:

```swift
let vendorParameters: [String: JSONValue] = [
"thinking": .object([
"type": .string("disabled") // "enabled" and "auto" are also available.
])
]

let result = try await openAI.chats(
query: query,
vendorParameters: vendorParameters
)

for try await chunk in openAI.chatsStream(query: query, vendorParameters: vendorParameters) {
// Handle streamed result with the same vendor-specific field applied.
}
```

## Function calling

See [OpenAI Platform Guide: Function calling](https://platform.openai.com/docs/guides/function-calling?api-mode=responses) for more details.
Expand All @@ -371,7 +394,6 @@ See [OpenAI Platform Guide: Function calling](https://platform.openai.com/docs/g
<summary>Chat Completions API Examples</summary>

### Function calling with get_weather function

```swift
let openAI = OpenAI(apiToken: "...")
// Declare functions which model might decide to call.
Expand Down
8 changes: 4 additions & 4 deletions Sources/OpenAI/OpenAI+OpenAIAsync.swift
Original file line number Diff line number Diff line change
Expand Up @@ -32,15 +32,15 @@ extension OpenAI: OpenAIAsync {
)
}

public func chats(query: ChatQuery) async throws -> ChatResult {
public func chats(query: ChatQuery, vendorParameters: [String: JSONValue]? = nil) async throws -> ChatResult {
try await performRequestAsync(
request: makeChatsRequest(query: query)
request: makeChatsRequest(query: query.makeNonStreamable(), vendorParameters: vendorParameters)
)
}

public func chatsStream(query: ChatQuery) -> AsyncThrowingStream<ChatStreamResult, Error> {
public func chatsStream(query: ChatQuery, vendorParameters: [String: JSONValue]? = nil) -> AsyncThrowingStream<ChatStreamResult, Error> {
makeAsyncStream { onResult, completion in
chatsStream(query: query, onResult: onResult, completion: completion)
chatsStream(query: query, vendorParameters: vendorParameters, onResult: onResult, completion: completion)
}
}

Expand Down
8 changes: 4 additions & 4 deletions Sources/OpenAI/OpenAI+OpenAICombine.swift
Original file line number Diff line number Diff line change
Expand Up @@ -32,15 +32,15 @@ extension OpenAI: OpenAICombine {
)
}

public func chats(query: ChatQuery) -> AnyPublisher<ChatResult, Error> {
public func chats(query: ChatQuery, vendorParameters: [String: JSONValue]? = nil) -> AnyPublisher<ChatResult, Error> {
performRequestCombine(
request: makeChatsRequest(query: query)
request: makeChatsRequest(query: query.makeNonStreamable(), vendorParameters: vendorParameters)
)
}

public func chatsStream(query: ChatQuery) -> AnyPublisher<Result<ChatStreamResult, Error>, Error> {
public func chatsStream(query: ChatQuery, vendorParameters: [String: JSONValue]? = nil) -> AnyPublisher<Result<ChatStreamResult, Error>, Error> {
makeStreamPublisher { onResult, completion in
chatsStream(query: query, onResult: onResult, completion: completion)
chatsStream(query: query, vendorParameters: vendorParameters, onResult: onResult, completion: completion)
}
}

Expand Down
20 changes: 15 additions & 5 deletions Sources/OpenAI/OpenAI.swift
Original file line number Diff line number Diff line change
Expand Up @@ -279,13 +279,23 @@ final public class OpenAI: OpenAIProtocol, @unchecked Sendable {
performRequest(request: makeEmbeddingsRequest(query: query), completion: completion)
}

public func chats(query: ChatQuery, completion: @escaping @Sendable (Result<ChatResult, Error>) -> Void) -> CancellableRequest {
performRequest(request: makeChatsRequest(query: query.makeNonStreamable()), completion: completion)
/// Sends a non-streaming chat completion request, optionally merging
/// provider-specific top-level JSON fields into the payload.
/// - Parameters:
///   - query: The chat request; forced non-streaming via `makeNonStreamable()`.
///   - vendorParameters: Optional extra top-level fields for OpenAI-compatible gateways.
///   - completion: Called with the `ChatResult` or an error.
/// - Returns: A handle that can be used to cancel the in-flight request.
public func chats(query: ChatQuery, vendorParameters: [String: JSONValue]? = nil, completion: @escaping @Sendable (Result<ChatResult, Error>) -> Void) -> CancellableRequest {
    performRequest(
        request: makeChatsRequest(query: query.makeNonStreamable(), vendorParameters: vendorParameters),
        completion: completion
    )
}

public func chatsStream(query: ChatQuery, onResult: @escaping @Sendable (Result<ChatStreamResult, Error>) -> Void, completion: (@Sendable (Error?) -> Void)?) -> CancellableRequest {
performStreamingRequest(
request: JSONRequest<ChatStreamResult>(body: query.makeStreamable(), url: buildURL(path: .chats)),
public func chatsStream(query: ChatQuery, vendorParameters: [String: JSONValue]? = nil, onResult: @escaping @Sendable (Result<ChatStreamResult, Error>) -> Void, completion: (@Sendable (Error?) -> Void)?) -> CancellableRequest {
let streamableQuery = query.makeStreamable()
let body: Codable
if let vendorParameters, !vendorParameters.isEmpty {
body = ChatVendorRequestBody(query: streamableQuery, vendorParameters: vendorParameters)
} else {
body = streamableQuery
}
return performStreamingRequest(
request: JSONRequest<ChatStreamResult>(body: body, url: buildURL(path: .chats)),
onResult: onResult,
completion: completion
)
Expand Down
51 changes: 51 additions & 0 deletions Sources/OpenAI/Private/ChatVendorRequestBody.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
//
// ChatVendorRequestBody.swift
//
//
// Created by limchihi on 11/22/25.
//

import Foundation

/// Wraps a `ChatQuery` so that additional provider-specific JSON fields can be
/// merged into the top-level request payload alongside the official Chat API fields.
struct ChatVendorRequestBody: Codable {
    private let query: ChatQuery
    private let vendorParameters: [String: JSONValue]

    /// - Parameters:
    ///   - query: The standard chat request that is encoded first.
    ///   - vendorParameters: Extra top-level fields to layer onto the payload.
    init(query: ChatQuery, vendorParameters: [String: JSONValue]) {
        self.query = query
        self.vendorParameters = vendorParameters
    }

    func encode(to encoder: Encoder) throws {
        // Encode the official payload first; vendor fields are appended afterwards.
        try query.encode(to: encoder)
        guard !vendorParameters.isEmpty else { return }

        // Requesting a second keyed container from the same encoder writes into the
        // same top-level JSON object. NOTE(review): this assumes the query encodes
        // as a keyed container and that the encoder merges repeated keyed-container
        // requests (JSONEncoder does) — confirm if other encoders are ever used.
        var container = encoder.container(keyedBy: DynamicCodingKey.self)
        for (key, value) in vendorParameters {
            // Skip keys that are already part of the official Chat API payload.
            if ChatQuery.CodingKeys(stringValue: key) != nil {
                continue
            }
            guard let codingKey = DynamicCodingKey(stringValue: key) else { continue }
            try container.encode(value, forKey: codingKey)
        }
    }

    /// Decoding only restores the underlying `ChatQuery`; vendor parameters are
    /// not round-tripped and come back as an empty dictionary.
    init(from decoder: Decoder) throws {
        self.query = try ChatQuery(from: decoder)
        self.vendorParameters = [:]
    }
}

/// A string-only `CodingKey` used to emit arbitrary, vendor-supplied field names.
private struct DynamicCodingKey: CodingKey {
    let stringValue: String

    init?(stringValue: String) {
        self.stringValue = stringValue
    }

    /// Vendor parameter names are never integer keys.
    var intValue: Int? { nil }

    init?(intValue: Int) {
        return nil
    }
}
11 changes: 9 additions & 2 deletions Sources/OpenAI/Private/OpenAI+MakeRequest.swift
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,8 @@ extension OpenAI {
.init(body: query, url: buildURL(path: .embeddings))
}

func makeChatsRequest(query: ChatQuery) -> JSONRequest<ChatResult> {
.init(body: query, url: buildURL(path: .chats))
/// Builds the JSON request for the chats endpoint, merging optional
/// vendor-specific fields into the body when provided.
func makeChatsRequest(query: ChatQuery, vendorParameters: [String: JSONValue]? = nil) -> JSONRequest<ChatResult> {
    .init(body: makeChatBody(query: query, vendorParameters: vendorParameters), url: buildURL(path: .chats))
}

func makeModelRequest(query: ModelQuery) -> JSONRequest<ModelResult> {
Expand Down Expand Up @@ -153,4 +153,11 @@ extension OpenAI {
body: query
)
}

/// Chooses the request body: the plain query when no vendor parameters are
/// supplied, otherwise a wrapper that merges them into the top-level payload.
private func makeChatBody(query: ChatQuery, vendorParameters: [String: JSONValue]?) -> Codable {
    if let vendorParameters, !vendorParameters.isEmpty {
        return ChatVendorRequestBody(query: query, vendorParameters: vendorParameters)
    }
    return query
}
}
108 changes: 108 additions & 0 deletions Sources/OpenAI/Public/Models/JSONValue.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,108 @@
//
// JSONValue.swift
//
//
// Created by limchihi on 11/22/25.
//

import Foundation

/// Represents a JSON value that can be safely encoded into request payloads.
///
/// Conforms to `Hashable` (subsumes the previous `Equatable`, so values can also
/// be dictionary keys or set members) and `Sendable`, since vendor-parameter
/// dictionaries cross the concurrency boundaries of the `Sendable` client
/// protocols and `@Sendable` completion closures.
public enum JSONValue: Codable, Hashable, Sendable {
    case string(String)
    case integer(Int)
    case double(Double)
    case bool(Bool)
    case array([JSONValue])
    case object([String: JSONValue])
    case null

    public init(from decoder: Decoder) throws {
        let container = try decoder.singleValueContainer()
        // Order matters: Bool is tried before Int/Double so `true` is not read as a
        // number, and Int before Double so whole numbers decode as `.integer`.
        if container.decodeNil() {
            self = .null
        } else if let value = try? container.decode(Bool.self) {
            self = .bool(value)
        } else if let value = try? container.decode(Int.self) {
            self = .integer(value)
        } else if let value = try? container.decode(Double.self) {
            self = .double(value)
        } else if let value = try? container.decode(String.self) {
            self = .string(value)
        } else if let value = try? container.decode([JSONValue].self) {
            self = .array(value)
        } else if let value = try? container.decode([String: JSONValue].self) {
            self = .object(value)
        } else {
            throw DecodingError.dataCorruptedError(in: container, debugDescription: "Unsupported JSON value.")
        }
    }

    public func encode(to encoder: Encoder) throws {
        var container = encoder.singleValueContainer()
        switch self {
        case let .string(value):
            try container.encode(value)
        case let .integer(value):
            try container.encode(value)
        case let .double(value):
            try container.encode(value)
        case let .bool(value):
            try container.encode(value)
        case let .array(values):
            try container.encode(values)
        case let .object(values):
            try container.encode(values)
        case .null:
            try container.encodeNil()
        }
    }
}

// MARK: - Literal conformances

/// Allows `let v: JSONValue = "text"`.
extension JSONValue: ExpressibleByStringLiteral {
    public init(stringLiteral value: String) {
        self = .string(value)
    }
}

/// Allows `let v: JSONValue = 42`.
extension JSONValue: ExpressibleByIntegerLiteral {
    public init(integerLiteral value: Int) {
        self = .integer(value)
    }
}

/// Allows `let v: JSONValue = 3.14`.
extension JSONValue: ExpressibleByFloatLiteral {
    public init(floatLiteral value: Double) {
        self = .double(value)
    }
}

/// Allows `let v: JSONValue = true`.
extension JSONValue: ExpressibleByBooleanLiteral {
    public init(booleanLiteral value: BooleanLiteralType) {
        self = .bool(value)
    }
}

/// Allows `let v: JSONValue = [.string("a"), .integer(1)]`.
extension JSONValue: ExpressibleByArrayLiteral {
    public init(arrayLiteral elements: JSONValue...) {
        self = .array(elements)
    }
}

/// Allows `let v: JSONValue = ["key": "value"]`.
extension JSONValue: ExpressibleByDictionaryLiteral {
    public init(dictionaryLiteral elements: (String, JSONValue)...) {
        var object: [String: JSONValue] = [:]
        // Later duplicate keys overwrite earlier ones.
        for (key, value) in elements {
            object[key] = value
        }
        self = .object(object)
    }
}

/// Allows `let v: JSONValue = nil`.
extension JSONValue: ExpressibleByNilLiteral {
    public init(nilLiteral: ()) {
        self = .null
    }
}
15 changes: 13 additions & 2 deletions Sources/OpenAI/Public/Protocols/OpenAIAsync.swift
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,8 @@ public protocol OpenAIAsync: Sendable {
func imageEdits(query: ImageEditsQuery) async throws -> ImagesResult
func imageVariations(query: ImageVariationsQuery) async throws -> ImagesResult
func embeddings(query: EmbeddingsQuery) async throws -> EmbeddingsResult
func chats(query: ChatQuery) async throws -> ChatResult
func chatsStream(query: ChatQuery) -> AsyncThrowingStream<ChatStreamResult, Error>
func chats(query: ChatQuery, vendorParameters: [String: JSONValue]?) async throws -> ChatResult
func chatsStream(query: ChatQuery, vendorParameters: [String: JSONValue]?) -> AsyncThrowingStream<ChatStreamResult, Error>
func model(query: ModelQuery) async throws -> ModelResult
func models() async throws -> ModelsResult
func moderations(query: ModerationsQuery) async throws -> ModerationsResult
Expand All @@ -38,3 +38,14 @@ public protocol OpenAIAsync: Sendable {
func threadsAddMessage(threadId: String, query: MessageQuery) async throws -> ThreadAddMessageResult
func files(query: FilesQuery) async throws -> FilesResult
}

@available(iOS 13.0, macOS 10.15, tvOS 13.0, watchOS 6.0, *)
public extension OpenAIAsync {
    /// Convenience overload that sends the chat request without any
    /// provider-specific fields (preserves the pre-existing API surface).
    func chats(query: ChatQuery) async throws -> ChatResult {
        try await chats(query: query, vendorParameters: nil)
    }

    /// Convenience overload that streams the chat response without any
    /// provider-specific fields.
    func chatsStream(query: ChatQuery) -> AsyncThrowingStream<ChatStreamResult, Error> {
        chatsStream(query: query, vendorParameters: nil)
    }
}
15 changes: 13 additions & 2 deletions Sources/OpenAI/Public/Protocols/OpenAICombine.swift
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,8 @@ public protocol OpenAICombine: Sendable {
func imageEdits(query: ImageEditsQuery) -> AnyPublisher<ImagesResult, Error>
func imageVariations(query: ImageVariationsQuery) -> AnyPublisher<ImagesResult, Error>
func embeddings(query: EmbeddingsQuery) -> AnyPublisher<EmbeddingsResult, Error>
func chats(query: ChatQuery) -> AnyPublisher<ChatResult, Error>
func chatsStream(query: ChatQuery) -> AnyPublisher<Result<ChatStreamResult, Error>, Error>
func chats(query: ChatQuery, vendorParameters: [String: JSONValue]?) -> AnyPublisher<ChatResult, Error>
func chatsStream(query: ChatQuery, vendorParameters: [String: JSONValue]?) -> AnyPublisher<Result<ChatStreamResult, Error>, Error>
func model(query: ModelQuery) -> AnyPublisher<ModelResult, Error>
func models() -> AnyPublisher<ModelsResult, Error>
func moderations(query: ModerationsQuery) -> AnyPublisher<ModerationsResult, Error>
Expand All @@ -38,4 +38,15 @@ public protocol OpenAICombine: Sendable {
func threadsAddMessage(threadId: String, query: MessageQuery) -> AnyPublisher<ThreadAddMessageResult, Error>
func files(query: FilesQuery) -> AnyPublisher<FilesResult, Error>
}

@available(iOS 13.0, tvOS 13.0, macOS 10.15, watchOS 6.0, *)
public extension OpenAICombine {
    /// Convenience overload that sends the chat request without any
    /// provider-specific fields (preserves the pre-existing API surface).
    func chats(query: ChatQuery) -> AnyPublisher<ChatResult, Error> {
        chats(query: query, vendorParameters: nil)
    }

    /// Convenience overload that streams the chat response without any
    /// provider-specific fields.
    func chatsStream(query: ChatQuery) -> AnyPublisher<Result<ChatStreamResult, Error>, Error> {
        chatsStream(query: query, vendorParameters: nil)
    }
}
#endif
31 changes: 21 additions & 10 deletions Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift
Original file line number Diff line number Diff line change
Expand Up @@ -97,9 +97,10 @@ public protocol OpenAIProtocol: OpenAIModern {

- Parameters:
- query: A `ChatQuery` object containing the input parameters for the API request. This includes the lists of message objects for the conversation, the model to be used, and other settings.
- vendorParameters: Optional provider-specific fields that will be merged into the JSON payload.
- completion: A closure which receives the result when the API request finishes. The closure's parameter, `Result<ChatResult, Error>`, will contain either the `ChatResult` object with the model's response to the conversation, or an error if the request failed.
**/
@discardableResult func chats(query: ChatQuery, completion: @escaping @Sendable (Result<ChatResult, Error>) -> Void) -> CancellableRequest
@discardableResult func chats(query: ChatQuery, vendorParameters: [String: JSONValue]?, completion: @escaping @Sendable (Result<ChatResult, Error>) -> Void) -> CancellableRequest

/**
This function sends a chat query to the OpenAI API and retrieves chat stream conversation responses. The Chat API enables you to build chatbots or conversational applications using OpenAI's powerful natural language models, like GPT-3. The result is returned by chunks.
Expand All @@ -115,15 +116,12 @@ public protocol OpenAIProtocol: OpenAIModern {
```

- Parameters:
- query: A `ChatQuery` object containing the input parameters for the API request. This includes the lists of message objects for the conversation, the model to be used, and other settings.
- onResult: A closure which receives the result when the API request finishes. The closure's parameter, `Result<ChatStreamResult, Error>`, will contain either the `ChatStreamResult` object with the model's response to the conversation, or an error if the request failed.
- completion: A closure that is being called when all chunks are delivered or uncrecoverable error occured.

- Returns: An object that references the streaming session.

- Note: This method creates and configures separate session object specifically for streaming. In order for it to work properly and don't leak memory you should hold a reference to the returned value, and when you're done - call cancel() on it.
*/
@discardableResult func chatsStream(query: ChatQuery, onResult: @escaping @Sendable (Result<ChatStreamResult, Error>) -> Void, completion: (@Sendable (Error?) -> Void)?) -> CancellableRequest
- query: A `ChatQuery` object containing the input parameters for the API request. This includes the lists of message objects for the conversation, the model to be used, and other settings.
- vendorParameters: Optional provider-specific fields that will be merged into the JSON payload.
- onResult: A closure which receives the result when the API request finishes. The closure's parameter, `Result<ChatStreamResult, Error>`, will contain either the `ChatStreamResult` object with the model's response to the conversation, or an error if the request failed.
    - completion: A closure that is called when all chunks have been delivered or an unrecoverable error occurred
**/
@discardableResult func chatsStream(query: ChatQuery, vendorParameters: [String: JSONValue]?, onResult: @escaping @Sendable (Result<ChatStreamResult, Error>) -> Void, completion: (@Sendable (Error?) -> Void)?) -> CancellableRequest

/**
This function sends a model query to the OpenAI API and retrieves a model instance, providing owner information. The Models API in this usage enables you to gather detailed information on the model in question, like GPT-3.
Expand Down Expand Up @@ -451,3 +449,16 @@ public protocol OpenAIProtocol: OpenAIModern {
**/
@discardableResult func files(query: FilesQuery, completion: @escaping @Sendable (Result<FilesResult, Error>) -> Void) -> CancellableRequest
}

@available(iOS 13.0, tvOS 13.0, macOS 10.15, watchOS 6.0, *)
public extension OpenAIProtocol {
    /// Convenience overload that sends the chat request without any
    /// provider-specific fields (preserves the pre-existing API surface).
    @discardableResult
    func chats(query: ChatQuery, completion: @escaping @Sendable (Result<ChatResult, Error>) -> Void) -> CancellableRequest {
        chats(query: query, vendorParameters: nil, completion: completion)
    }

    /// Convenience overload that streams the chat response without any
    /// provider-specific fields.
    @discardableResult
    func chatsStream(query: ChatQuery, onResult: @escaping @Sendable (Result<ChatStreamResult, Error>) -> Void, completion: (@Sendable (Error?) -> Void)?) -> CancellableRequest {
        chatsStream(query: query, vendorParameters: nil, onResult: onResult, completion: completion)
    }
}
Loading