
Commit 8a66d08

Add vendor parameter support for chat requests
1 parent 4ed3ea9 · commit 8a66d08

12 files changed: +285 −30 lines

README.md

Lines changed: 23 additions & 1 deletion
@@ -362,6 +362,29 @@ print(result.choices.first?.message.content ?? "")
 
 ## Structured Outputs -->
 
+### Provider-specific fields
+
+When calling OpenAI-compatible gateways that expect additional JSON fields, supply them through the optional `vendorParameters` argument on chat APIs. The SDK merges these values into the top-level payload without disturbing the documented schema.
+
+For example, Volcengine's Doubao models expose a `thinking` object that controls deep-thinking behaviour:
+
+```swift
+let vendorParameters: [String: JSONValue] = [
+    "thinking": .object([
+        "type": .string("disabled") // "enabled" and "auto" are also available.
+    ])
+]
+
+let result = try await openAI.chats(
+    query: query,
+    vendorParameters: vendorParameters
+)
+
+for try await chunk in openAI.chatsStream(query: query, vendorParameters: vendorParameters) {
+    // Handle streamed result with the same vendor-specific field applied.
+}
+```
+
 ## Function calling
 
 See [OpenAI Platform Guide: Function calling](https://platform.openai.com/docs/guides/function-calling?api-mode=responses) for more details.

@@ -371,7 +394,6 @@ See [OpenAI Platform Guide: Function calling](https://platform.openai.com/docs/g
 <summary>Chat Completions API Examples</summary>
 
 ### Function calling with get_weather function
-
 ```swift
 let openAI = OpenAI(apiToken: "...")
 // Declare functions which model might decide to call.
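Because `JSONValue` (added in this commit, see JSONValue.swift below) conforms to the literal-expressible protocols, the README example's dictionary can also be written with plain literals. This is only an equivalent spelling of the same value, not a different API:

```swift
// Equivalent to the README example above: the nested dictionary and string
// literals become the .object and .string cases of JSONValue.
let vendorParameters: [String: JSONValue] = [
    "thinking": ["type": "disabled"]
]
```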

Sources/OpenAI/OpenAI+OpenAIAsync.swift

Lines changed: 4 additions & 4 deletions
@@ -32,15 +32,15 @@ extension OpenAI: OpenAIAsync {
         )
     }
 
-    public func chats(query: ChatQuery) async throws -> ChatResult {
+    public func chats(query: ChatQuery, vendorParameters: [String: JSONValue]? = nil) async throws -> ChatResult {
         try await performRequestAsync(
-            request: makeChatsRequest(query: query)
+            request: makeChatsRequest(query: query.makeNonStreamable(), vendorParameters: vendorParameters)
         )
     }
 
-    public func chatsStream(query: ChatQuery) -> AsyncThrowingStream<ChatStreamResult, Error> {
+    public func chatsStream(query: ChatQuery, vendorParameters: [String: JSONValue]? = nil) -> AsyncThrowingStream<ChatStreamResult, Error> {
         makeAsyncStream { onResult, completion in
-            chatsStream(query: query, onResult: onResult, completion: completion)
+            chatsStream(query: query, vendorParameters: vendorParameters, onResult: onResult, completion: completion)
         }
     }
 
Sources/OpenAI/OpenAI+OpenAICombine.swift

Lines changed: 4 additions & 4 deletions
@@ -32,15 +32,15 @@ extension OpenAI: OpenAICombine {
         )
     }
 
-    public func chats(query: ChatQuery) -> AnyPublisher<ChatResult, Error> {
+    public func chats(query: ChatQuery, vendorParameters: [String: JSONValue]? = nil) -> AnyPublisher<ChatResult, Error> {
         performRequestCombine(
-            request: makeChatsRequest(query: query)
+            request: makeChatsRequest(query: query.makeNonStreamable(), vendorParameters: vendorParameters)
         )
     }
 
-    public func chatsStream(query: ChatQuery) -> AnyPublisher<Result<ChatStreamResult, Error>, Error> {
+    public func chatsStream(query: ChatQuery, vendorParameters: [String: JSONValue]? = nil) -> AnyPublisher<Result<ChatStreamResult, Error>, Error> {
         makeStreamPublisher { onResult, completion in
-            chatsStream(query: query, onResult: onResult, completion: completion)
+            chatsStream(query: query, vendorParameters: vendorParameters, onResult: onResult, completion: completion)
         }
     }
 
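The Combine overloads accept the same optional argument. A sketch of a call site, assuming an `openAI` client and a prepared `query` already exist in scope (neither is defined in this diff):

```swift
import Combine

// Hypothetical call site; `openAI` and `query` are assumed to be defined elsewhere.
var cancellables = Set<AnyCancellable>()

openAI.chats(query: query, vendorParameters: ["thinking": ["type": "disabled"]])
    .sink(
        receiveCompletion: { completion in
            if case .failure(let error) = completion {
                print("Chat request failed: \(error)")
            }
        },
        receiveValue: { result in
            print(result.choices.first?.message.content ?? "")
        }
    )
    .store(in: &cancellables)
```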

Sources/OpenAI/OpenAI.swift

Lines changed: 15 additions & 5 deletions
@@ -279,13 +279,23 @@ final public class OpenAI: OpenAIProtocol, @unchecked Sendable {
         performRequest(request: makeEmbeddingsRequest(query: query), completion: completion)
     }
 
-    public func chats(query: ChatQuery, completion: @escaping @Sendable (Result<ChatResult, Error>) -> Void) -> CancellableRequest {
-        performRequest(request: makeChatsRequest(query: query.makeNonStreamable()), completion: completion)
+    public func chats(query: ChatQuery, vendorParameters: [String: JSONValue]? = nil, completion: @escaping @Sendable (Result<ChatResult, Error>) -> Void) -> CancellableRequest {
+        performRequest(
+            request: makeChatsRequest(query: query.makeNonStreamable(), vendorParameters: vendorParameters),
+            completion: completion
+        )
     }
 
-    public func chatsStream(query: ChatQuery, onResult: @escaping @Sendable (Result<ChatStreamResult, Error>) -> Void, completion: (@Sendable (Error?) -> Void)?) -> CancellableRequest {
-        performStreamingRequest(
-            request: JSONRequest<ChatStreamResult>(body: query.makeStreamable(), url: buildURL(path: .chats)),
+    public func chatsStream(query: ChatQuery, vendorParameters: [String: JSONValue]? = nil, onResult: @escaping @Sendable (Result<ChatStreamResult, Error>) -> Void, completion: (@Sendable (Error?) -> Void)?) -> CancellableRequest {
+        let streamableQuery = query.makeStreamable()
+        let body: Codable
+        if let vendorParameters, !vendorParameters.isEmpty {
+            body = ChatVendorRequestBody(query: streamableQuery, vendorParameters: vendorParameters)
+        } else {
+            body = streamableQuery
+        }
+        return performStreamingRequest(
+            request: JSONRequest<ChatStreamResult>(body: body, url: buildURL(path: .chats)),
             onResult: onResult,
             completion: completion
         )

ChatVendorRequestBody.swift (new file)

Lines changed: 51 additions & 0 deletions
@@ -0,0 +1,51 @@
+//
+//  ChatVendorRequestBody.swift
+//
+//
+//  Created by limchihi on 11/22/25.
+//
+
+import Foundation
+
+struct ChatVendorRequestBody: Codable {
+    private let query: ChatQuery
+    private let vendorParameters: [String: JSONValue]
+
+    init(query: ChatQuery, vendorParameters: [String: JSONValue]) {
+        self.query = query
+        self.vendorParameters = vendorParameters
+    }
+
+    func encode(to encoder: Encoder) throws {
+        try query.encode(to: encoder)
+        guard !vendorParameters.isEmpty else { return }
+
+        var container = encoder.container(keyedBy: DynamicCodingKey.self)
+        for (key, value) in vendorParameters {
+            // Skip keys that are already part of the official Chat API payload.
+            if ChatQuery.CodingKeys(stringValue: key) != nil {
+                continue
+            }
+            guard let codingKey = DynamicCodingKey(stringValue: key) else { continue }
+            try container.encode(value, forKey: codingKey)
+        }
+    }
+
+    init(from decoder: Decoder) throws {
+        self.query = try ChatQuery(from: decoder)
+        self.vendorParameters = [:]
+    }
+}
+
+private struct DynamicCodingKey: CodingKey {
+    let stringValue: String
+    var intValue: Int? { nil }
+
+    init?(stringValue: String) {
+        self.stringValue = stringValue
+    }
+
+    init?(intValue: Int) {
+        return nil
+    }
+}
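
The wrapper above is the core of the feature: it first encodes the official `ChatQuery` payload, then writes the vendor keys into the same keyed container, skipping any key the official schema already owns. Below is a minimal, self-contained sketch of that merge technique; `BaseQuery` and `MergedBody` are illustrative stand-ins rather than SDK types, `"doubao-pro"` is a placeholder model name, and the `JSONValue` type added in this commit is assumed to be in scope:

```swift
import Foundation

// Stand-in for ChatQuery, purely for illustration.
struct BaseQuery: Codable {
    let model: String
}

// Same merge technique as ChatVendorRequestBody: encode the base payload first,
// then add extra top-level keys through a string-keyed dynamic CodingKey.
struct MergedBody: Encodable {
    let base: BaseQuery
    let extras: [String: JSONValue]

    private struct Key: CodingKey {
        let stringValue: String
        var intValue: Int? { nil }
        init?(stringValue: String) { self.stringValue = stringValue }
        init?(intValue: Int) { return nil }
    }

    func encode(to encoder: Encoder) throws {
        try base.encode(to: encoder)
        var container = encoder.container(keyedBy: Key.self)
        for (key, value) in extras {
            guard let codingKey = Key(stringValue: key) else { continue }
            try container.encode(value, forKey: codingKey)
        }
    }
}

let body = MergedBody(
    base: BaseQuery(model: "doubao-pro"),          // placeholder model name
    extras: ["thinking": ["type": "disabled"]]
)

do {
    let data = try JSONEncoder().encode(body)
    print(String(data: data, encoding: .utf8) ?? "")
    // Roughly: {"model":"doubao-pro","thinking":{"type":"disabled"}} (key order may vary)
} catch {
    print("Encoding failed: \(error)")
}
```

With Foundation's `JSONEncoder`, requesting a keyed container after `base.encode(to:)` refers to the same underlying JSON object, which is why the extra keys land at the top level of the payload.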

Sources/OpenAI/Private/OpenAI+MakeRequest.swift

Lines changed: 9 additions & 2 deletions
@@ -25,8 +25,8 @@ extension OpenAI {
         .init(body: query, url: buildURL(path: .embeddings))
     }
 
-    func makeChatsRequest(query: ChatQuery) -> JSONRequest<ChatResult> {
-        .init(body: query, url: buildURL(path: .chats))
+    func makeChatsRequest(query: ChatQuery, vendorParameters: [String: JSONValue]? = nil) -> JSONRequest<ChatResult> {
+        .init(body: makeChatBody(query: query, vendorParameters: vendorParameters), url: buildURL(path: .chats))
     }
 
     func makeModelRequest(query: ModelQuery) -> JSONRequest<ModelResult> {

@@ -153,4 +153,11 @@
             body: query
         )
     }
+
+    private func makeChatBody(query: ChatQuery, vendorParameters: [String: JSONValue]?) -> Codable {
+        guard let vendorParameters, !vendorParameters.isEmpty else {
+            return query
+        }
+        return ChatVendorRequestBody(query: query, vendorParameters: vendorParameters)
+    }
 }

JSONValue.swift (new file)

Lines changed: 108 additions & 0 deletions
@@ -0,0 +1,108 @@
+//
+//  JSONValue.swift
+//
+//
+//  Created by limchihi on 11/22/25.
+//
+
+import Foundation
+
+/// Represents a JSON value that can be safely encoded into request payloads.
+public enum JSONValue: Codable, Equatable {
+    case string(String)
+    case integer(Int)
+    case double(Double)
+    case bool(Bool)
+    case array([JSONValue])
+    case object([String: JSONValue])
+    case null
+
+    public init(from decoder: Decoder) throws {
+        let container = try decoder.singleValueContainer()
+        if container.decodeNil() {
+            self = .null
+        } else if let value = try? container.decode(Bool.self) {
+            self = .bool(value)
+        } else if let value = try? container.decode(Int.self) {
+            self = .integer(value)
+        } else if let value = try? container.decode(Double.self) {
+            self = .double(value)
+        } else if let value = try? container.decode(String.self) {
+            self = .string(value)
+        } else if let value = try? container.decode([JSONValue].self) {
+            self = .array(value)
+        } else if let value = try? container.decode([String: JSONValue].self) {
+            self = .object(value)
+        } else {
+            throw DecodingError.dataCorruptedError(in: container, debugDescription: "Unsupported JSON value.")
+        }
+    }
+
+    public func encode(to encoder: Encoder) throws {
+        var container = encoder.singleValueContainer()
+        switch self {
+        case let .string(value):
+            try container.encode(value)
+        case let .integer(value):
+            try container.encode(value)
+        case let .double(value):
+            try container.encode(value)
+        case let .bool(value):
+            try container.encode(value)
+        case let .array(values):
+            try container.encode(values)
+        case let .object(values):
+            try container.encode(values)
+        case .null:
+            try container.encodeNil()
+        }
+    }
+}
+
+// MARK: - Literal conformances
+
+extension JSONValue: ExpressibleByStringLiteral {
+    public init(stringLiteral value: String) {
+        self = .string(value)
+    }
+}
+
+extension JSONValue: ExpressibleByIntegerLiteral {
+    public init(integerLiteral value: Int) {
+        self = .integer(value)
+    }
+}
+
+extension JSONValue: ExpressibleByFloatLiteral {
+    public init(floatLiteral value: Double) {
+        self = .double(value)
+    }
+}
+
+extension JSONValue: ExpressibleByBooleanLiteral {
+    public init(booleanLiteral value: BooleanLiteralType) {
+        self = .bool(value)
+    }
+}
+
+extension JSONValue: ExpressibleByArrayLiteral {
+    public init(arrayLiteral elements: JSONValue...) {
+        self = .array(elements)
+    }
+}
+
+extension JSONValue: ExpressibleByDictionaryLiteral {
+    public init(dictionaryLiteral elements: (String, JSONValue)...) {
+        var object: [String: JSONValue] = [:]
+        for (key, value) in elements {
+            object[key] = value
+        }
+        self = .object(object)
+    }
+}
+
+extension JSONValue: ExpressibleByNilLiteral {
+    public init(nilLiteral: ()) {
+        self = .null
+    }
+}
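
As a quick illustration of the literal conformances above, vendor parameters can be written almost like raw JSON. Apart from `thinking` (documented in the README), the keys below are made-up placeholders rather than parameters of any particular provider:

```swift
import Foundation

// Every right-hand value is a JSONValue case built from a literal.
let parameters: [String: JSONValue] = [
    "thinking": ["type": "enabled"],   // object + string literals
    "top_k": 5,                        // integer literal (placeholder key)
    "repetition_penalty": 1.05,        // float literal (placeholder key)
    "stop": ["</answer>", nil],        // array literal containing null (placeholder key)
    "echo": false                      // boolean literal (placeholder key)
]

do {
    let data = try JSONEncoder().encode(parameters)
    print(String(data: data, encoding: .utf8) ?? "")
} catch {
    print("Encoding failed: \(error)")
}
```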

Sources/OpenAI/Public/Protocols/OpenAIAsync.swift

Lines changed: 13 additions & 2 deletions
@@ -12,8 +12,8 @@ public protocol OpenAIAsync: Sendable {
     func imageEdits(query: ImageEditsQuery) async throws -> ImagesResult
     func imageVariations(query: ImageVariationsQuery) async throws -> ImagesResult
     func embeddings(query: EmbeddingsQuery) async throws -> EmbeddingsResult
-    func chats(query: ChatQuery) async throws -> ChatResult
-    func chatsStream(query: ChatQuery) -> AsyncThrowingStream<ChatStreamResult, Error>
+    func chats(query: ChatQuery, vendorParameters: [String: JSONValue]?) async throws -> ChatResult
+    func chatsStream(query: ChatQuery, vendorParameters: [String: JSONValue]?) -> AsyncThrowingStream<ChatStreamResult, Error>
     func model(query: ModelQuery) async throws -> ModelResult
     func models() async throws -> ModelsResult
     func moderations(query: ModerationsQuery) async throws -> ModerationsResult

@@ -38,3 +38,14 @@
     func threadsAddMessage(threadId: String, query: MessageQuery) async throws -> ThreadAddMessageResult
     func files(query: FilesQuery) async throws -> FilesResult
 }
+
+@available(iOS 13.0, macOS 10.15, tvOS 13.0, watchOS 6.0, *)
+public extension OpenAIAsync {
+    func chats(query: ChatQuery) async throws -> ChatResult {
+        try await chats(query: query, vendorParameters: nil)
+    }
+
+    func chatsStream(query: ChatQuery) -> AsyncThrowingStream<ChatStreamResult, Error> {
+        chatsStream(query: query, vendorParameters: nil)
+    }
+}
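
The extension exists because Swift protocol requirements cannot declare default argument values; restoring the old single-argument signatures as convenience overloads keeps existing call sites compiling (conformers still have to adopt the new requirement). A generic sketch of the same pattern, using placeholder types unrelated to the SDK:

```swift
// Placeholder protocol, not part of the SDK.
protocol Greeter {
    func greet(name: String, punctuation: String?) -> String
}

extension Greeter {
    // Convenience overload: callers that wrote greet(name:) keep compiling,
    // because the protocol itself cannot carry a default argument value.
    func greet(name: String) -> String {
        greet(name: name, punctuation: nil)
    }
}

struct ConsoleGreeter: Greeter {
    func greet(name: String, punctuation: String?) -> String {
        "Hello, \(name)\(punctuation ?? "!")"
    }
}

let greeter: any Greeter = ConsoleGreeter()
print(greeter.greet(name: "world"))                    // "Hello, world!"
print(greeter.greet(name: "world", punctuation: "?"))  // "Hello, world?"
```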

Sources/OpenAI/Public/Protocols/OpenAICombine.swift

Lines changed: 13 additions & 2 deletions
@@ -14,8 +14,8 @@ public protocol OpenAICombine: Sendable {
     func imageEdits(query: ImageEditsQuery) -> AnyPublisher<ImagesResult, Error>
     func imageVariations(query: ImageVariationsQuery) -> AnyPublisher<ImagesResult, Error>
     func embeddings(query: EmbeddingsQuery) -> AnyPublisher<EmbeddingsResult, Error>
-    func chats(query: ChatQuery) -> AnyPublisher<ChatResult, Error>
-    func chatsStream(query: ChatQuery) -> AnyPublisher<Result<ChatStreamResult, Error>, Error>
+    func chats(query: ChatQuery, vendorParameters: [String: JSONValue]?) -> AnyPublisher<ChatResult, Error>
+    func chatsStream(query: ChatQuery, vendorParameters: [String: JSONValue]?) -> AnyPublisher<Result<ChatStreamResult, Error>, Error>
     func model(query: ModelQuery) -> AnyPublisher<ModelResult, Error>
     func models() -> AnyPublisher<ModelsResult, Error>
     func moderations(query: ModerationsQuery) -> AnyPublisher<ModerationsResult, Error>

@@ -38,4 +38,15 @@
     func threadsAddMessage(threadId: String, query: MessageQuery) -> AnyPublisher<ThreadAddMessageResult, Error>
     func files(query: FilesQuery) -> AnyPublisher<FilesResult, Error>
 }
+
+@available(iOS 13.0, tvOS 13.0, macOS 10.15, watchOS 6.0, *)
+public extension OpenAICombine {
+    func chats(query: ChatQuery) -> AnyPublisher<ChatResult, Error> {
+        chats(query: query, vendorParameters: nil)
+    }
+
+    func chatsStream(query: ChatQuery) -> AnyPublisher<Result<ChatStreamResult, Error>, Error> {
+        chatsStream(query: query, vendorParameters: nil)
+    }
+}
 #endif

Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift

Lines changed: 21 additions & 10 deletions
@@ -97,9 +97,10 @@ public protocol OpenAIProtocol: OpenAIModern {
 
     - Parameters:
       - query: A `ChatQuery` object containing the input parameters for the API request. This includes the lists of message objects for the conversation, the model to be used, and other settings.
+      - vendorParameters: Optional provider-specific fields that will be merged into the JSON payload.
       - completion: A closure which receives the result when the API request finishes. The closure's parameter, `Result<ChatResult, Error>`, will contain either the `ChatResult` object with the model's response to the conversation, or an error if the request failed.
     **/
-    @discardableResult func chats(query: ChatQuery, completion: @escaping @Sendable (Result<ChatResult, Error>) -> Void) -> CancellableRequest
+    @discardableResult func chats(query: ChatQuery, vendorParameters: [String: JSONValue]?, completion: @escaping @Sendable (Result<ChatResult, Error>) -> Void) -> CancellableRequest
 
     /**
     This function sends a chat query to the OpenAI API and retrieves chat stream conversation responses. The Chat API enables you to build chatbots or conversational applications using OpenAI's powerful natural language models, like GPT-3. The result is returned by chunks.

@@ -115,15 +116,12 @@
     ```
 
     - Parameters:
-      - query: A `ChatQuery` object containing the input parameters for the API request. This includes the lists of message objects for the conversation, the model to be used, and other settings.
-      - onResult: A closure which receives the result when the API request finishes. The closure's parameter, `Result<ChatStreamResult, Error>`, will contain either the `ChatStreamResult` object with the model's response to the conversation, or an error if the request failed.
-      - completion: A closure that is being called when all chunks are delivered or uncrecoverable error occured.
-
-    - Returns: An object that references the streaming session.
-
-    - Note: This method creates and configures separate session object specifically for streaming. In order for it to work properly and don't leak memory you should hold a reference to the returned value, and when you're done - call cancel() on it.
-    */
-    @discardableResult func chatsStream(query: ChatQuery, onResult: @escaping @Sendable (Result<ChatStreamResult, Error>) -> Void, completion: (@Sendable (Error?) -> Void)?) -> CancellableRequest
+      - query: A `ChatQuery` object containing the input parameters for the API request. This includes the lists of message objects for the conversation, the model to be used, and other settings.
+      - vendorParameters: Optional provider-specific fields that will be merged into the JSON payload.
+      - onResult: A closure which receives the result when the API request finishes. The closure's parameter, `Result<ChatStreamResult, Error>`, will contain either the `ChatStreamResult` object with the model's response to the conversation, or an error if the request failed.
+      - completion: A closure that is being called when all chunks are delivered or uncrecoverable error occured
+    **/
+    @discardableResult func chatsStream(query: ChatQuery, vendorParameters: [String: JSONValue]?, onResult: @escaping @Sendable (Result<ChatStreamResult, Error>) -> Void, completion: (@Sendable (Error?) -> Void)?) -> CancellableRequest
 
     /**
     This function sends a model query to the OpenAI API and retrieves a model instance, providing owner information. The Models API in this usage enables you to gather detailed information on the model in question, like GPT-3.

@@ -451,3 +449,16 @@
     **/
     @discardableResult func files(query: FilesQuery, completion: @escaping @Sendable (Result<FilesResult, Error>) -> Void) -> CancellableRequest
 }
+
+@available(iOS 13.0, tvOS 13.0, macOS 10.15, watchOS 6.0, *)
+public extension OpenAIProtocol {
+    @discardableResult
+    func chats(query: ChatQuery, completion: @escaping @Sendable (Result<ChatResult, Error>) -> Void) -> CancellableRequest {
+        chats(query: query, vendorParameters: nil, completion: completion)
+    }
+
+    @discardableResult
+    func chatsStream(query: ChatQuery, onResult: @escaping @Sendable (Result<ChatStreamResult, Error>) -> Void, completion: (@Sendable (Error?) -> Void)?) -> CancellableRequest {
+        chatsStream(query: query, vendorParameters: nil, onResult: onResult, completion: completion)
+    }
+}
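
For completeness, a hypothetical call site for the closure-based API with the new argument; `openAI` and `query` are assumed to exist elsewhere, and `"auto"` is one of the values mentioned in the README example:

```swift
// The returned CancellableRequest should be retained if you may need to cancel it.
let request = openAI.chats(
    query: query,
    vendorParameters: ["thinking": ["type": "auto"]]
) { result in
    switch result {
    case .success(let chatResult):
        print(chatResult.choices.first?.message.content ?? "")
    case .failure(let error):
        print("Chat request failed: \(error)")
    }
}
```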
