Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/pull_request.yml
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ jobs:
# We pass the list of examples here, but we can't pass an array as argument
# Instead, we pass a String with a valid JSON array.
# The workaround is mentioned here https://github.com/orgs/community/discussions/11692
examples: "[ 'api-key', 'converse', 'converse-stream', 'text_chat' ]"
examples: "[ 'api-key', 'converse', 'converse-stream', 'openai', 'text_chat' ]"

swift-6-language-mode:
name: Swift 6 Language Mode
Expand Down
8 changes: 8 additions & 0 deletions Examples/openai/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
.DS_Store
/.build
/Packages
xcuserdata/
DerivedData/
.swiftpm/configuration/registries.json
.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
.netrc
36 changes: 36 additions & 0 deletions Examples/openai/Package.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
// swift-tools-version: 6.1
// The swift-tools-version declares the minimum version of Swift required to build this package.

import PackageDescription

// Every example executable in this package links the same two products,
// so the dependency list is declared once and shared.
let commonDependencies: [Target.Dependency] = [
    .product(name: "BedrockService", package: "swift-bedrock-library"),
    .product(name: "Logging", package: "swift-log"),
]

let package = Package(
    name: "OpenAI",
    platforms: [.macOS(.v15), .iOS(.v18), .tvOS(.v18)],
    dependencies: [
        // for production use, uncomment the following line
        // .package(url: "https://github.com/build-on-aws/swift-bedrock-library.git", branch: "main"),

        // for local development, use the following line
        .package(name: "swift-bedrock-library", path: "../.."),

        .package(url: "https://github.com/apple/swift-log.git", from: "1.5.0"),
    ],
    targets: [
        .executableTarget(
            name: "OpenAIInvoke",
            dependencies: commonDependencies,
            path: "Sources/Invoke"
        ),
        .executableTarget(
            name: "OpenAIConverse",
            dependencies: commonDependencies,
            path: "Sources/Converse"
        ),
    ]
)
32 changes: 32 additions & 0 deletions Examples/openai/Sources/Converse/main.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
//===----------------------------------------------------------------------===//
//
// This source file is part of the Swift Bedrock Library open source project
//
// Copyright (c) 2025 Amazon.com, Inc. or its affiliates
// and the Swift Bedrock Library project authors
// Licensed under Apache License v2.0
//
// See LICENSE.txt for license information
// See CONTRIBUTORS.txt for the list of Swift Bedrock Library project authors
//
// SPDX-License-Identifier: Apache-2.0
//
//===----------------------------------------------------------------------===//

import BedrockService
import Logging

// Example: single-turn Converse call against an OpenAI GPT OSS model on Bedrock.
var logger = Logger(label: "OpenAIConverse")
logger.logLevel = .debug

let bedrock = try await BedrockService(
    region: .uswest2,
    logger: logger
)

// `builder` and `reply` are never mutated after creation — use `let`, not `var`.
let builder = try ConverseRequestBuilder(with: .openai_gpt_oss_20b)
    .withPrompt("Who are you?")

let reply = try await bedrock.converse(with: builder)

print("Assistant: \(reply)")
35 changes: 35 additions & 0 deletions Examples/openai/Sources/Invoke/main.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
//===----------------------------------------------------------------------===//
//
// This source file is part of the Swift Bedrock Library open source project
//
// Copyright (c) 2025 Amazon.com, Inc. or its affiliates
// and the Swift Bedrock Library project authors
// Licensed under Apache License v2.0
//
// See LICENSE.txt for license information
// See CONTRIBUTORS.txt for the list of Swift Bedrock Library project authors
//
// SPDX-License-Identifier: Apache-2.0
//
//===----------------------------------------------------------------------===//

import BedrockService
import Logging

// Example: one-shot text completion against an OpenAI GPT OSS model on Bedrock.
var logger = Logger(label: "OpenAIInvoke")
logger.logLevel = .debug

let bedrock = try await BedrockService(
    region: .uswest2,
    logger: logger
)

let result = try await bedrock.completeText(
    "Who are you?",
    with: .openai_gpt_oss_20b
)

// gpt-oss models may return a separate reasoning trace; show it when present.
if let reasoning = result.reasoning {
    print("------- Reasoning ----------\n\(reasoning)\n----------------------------\n")
}
print(result.completion)
3 changes: 3 additions & 0 deletions Sources/BedrockModel.swift
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,9 @@ public struct BedrockModel: Hashable, Sendable, Equatable, RawRepresentable {
//cohere
case BedrockModel.cohere_command_R_plus.id: self = BedrockModel.cohere_command_R_plus
case BedrockModel.cohere_command_R.id: self = BedrockModel.cohere_command_R
// OpenAI
case BedrockModel.openai_gpt_oss_20b.id: self = BedrockModel.openai_gpt_oss_20b
case BedrockModel.openai_gpt_oss_120b.id: self = BedrockModel.openai_gpt_oss_120b
default:
return nil
}
Expand Down
2 changes: 1 addition & 1 deletion Sources/Converse/ConverseReply.swift
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
//
//===----------------------------------------------------------------------===//

public struct ConverseReply: Codable, CustomStringConvertible {
public struct ConverseReply: Codable, Sendable, CustomStringConvertible {
let history: History
let textReply: String?
let toolUse: ToolUseBlock?
Expand Down
3 changes: 2 additions & 1 deletion Sources/InvokeModel/BedrockService+InvokeModelImage.swift
Original file line number Diff line number Diff line change
Expand Up @@ -182,7 +182,8 @@ extension BedrockService {
}
let invokemodelResponse: InvokeModelResponse = try InvokeModelResponse.createImageResponse(
body: responseBody,
model: model
model: model,
logger: self.logger
)
return try invokemodelResponse.getGeneratedImage()
}
Expand Down
3 changes: 2 additions & 1 deletion Sources/InvokeModel/BedrockService+InvokeModelText.swift
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,8 @@ extension BedrockService {

let invokemodelResponse: InvokeModelResponse = try InvokeModelResponse.createTextResponse(
body: responseBody,
model: model
model: model,
logger: self.logger
)
logger.trace(
"Generated text completion",
Expand Down
7 changes: 5 additions & 2 deletions Sources/InvokeModel/InvokeModelResponse.swift
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@

@preconcurrency import AWSBedrockRuntime
import Foundation
import Logging

public struct InvokeModelResponse {
let model: BedrockModel
Expand Down Expand Up @@ -50,9 +51,10 @@ public struct InvokeModelResponse {
/// - model: The Bedrock model that generated the response
/// - Throws: BedrockLibraryError.invalidModel if the model is not supported
/// BedrockLibraryError.invalidResponseBody if the response cannot be decoded
static func createTextResponse(body data: Data, model: BedrockModel) throws -> Self {
static func createTextResponse(body data: Data, model: BedrockModel, logger: Logger) throws -> Self {
do {
let textModality = try model.getTextModality()
logger.trace("Raw response data:\n\(String(data: data, encoding: .utf8) ?? "")\n")
return self.init(model: model, textCompletionBody: try textModality.getTextResponseBody(from: data))
} catch {
throw BedrockLibraryError.invalidSDKResponseBody(data)
Expand All @@ -65,9 +67,10 @@ public struct InvokeModelResponse {
/// - model: The Bedrock model that generated the response
/// - Throws: BedrockLibraryError.invalidModel if the model is not supported
/// BedrockLibraryError.invalidResponseBody if the response cannot be decoded
static func createImageResponse(body data: Data, model: BedrockModel) throws -> Self {
static func createImageResponse(body data: Data, model: BedrockModel, logger: Logger) throws -> Self {
do {
let imageModality = try model.getImageModality()
logger.trace("Raw response", metadata: ["Data": "\(String(data: data, encoding: .utf8) ?? "")"])
return self.init(model: model, imageGenerationBody: try imageModality.getImageResponseBody(from: data))
} catch {
throw BedrockLibraryError.invalidSDKResponseBody(data)
Expand Down
20 changes: 18 additions & 2 deletions Sources/InvokeModel/TextCompletion.swift
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,26 @@

import Foundation

public struct TextCompletion: Codable {
/// A text completion returned by the InvokeModel API, with any model-emitted
/// `<reasoning>…</reasoning>` block split out from the final answer.
public struct TextCompletion: Codable, Sendable {
    /// The completion text with the reasoning block (if any) removed.
    public let completion: String
    /// Content of the first `<reasoning>…</reasoning>` block, or `nil` when absent.
    public let reasoning: String?

    /// Splits `completion` into answer and reasoning parts.
    public init(_ completion: String) {
        let (extractedCompletion, extractedReasoning) = Self.extractReasoning(from: completion)
        self.completion = extractedCompletion
        self.reasoning = extractedReasoning
    }

    /// Extracts the first `<reasoning>…</reasoning>` span from `text`.
    /// - Returns: the text with the span removed, plus the span's inner content
    ///   (`nil` when no span is present).
    private static func extractReasoning(from text: String) -> (completion: String, reasoning: String?) {
        // (?s) makes `.` match newlines. Without it the lazy `.*?` never
        // crosses a line break, so any multi-line reasoning block — the normal
        // case for gpt-oss output — was silently left inside `completion`.
        let reasoningRegex = /(?s)<reasoning>(.*?)<\/reasoning>/

        guard let match = text.firstMatch(of: reasoningRegex) else {
            return (text, nil)
        }

        let reasoning = String(match.1)
        let cleanedText = text.replacing(reasoningRegex, with: "")

        return (cleanedText, reasoning)
    }
}
74 changes: 74 additions & 0 deletions Sources/Models/OpenAI/OpenAI.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
//===----------------------------------------------------------------------===//
//
// This source file is part of the Swift Bedrock Library open source project
//
// Copyright (c) 2025 Amazon.com, Inc. or its affiliates
// and the Swift Bedrock Library project authors
// Licensed under Apache License v2.0
//
// See LICENSE.txt for license information
// See CONTRIBUTORS.txt for the list of Swift Bedrock Library project authors
//
// SPDX-License-Identifier: Apache-2.0
//
//===----------------------------------------------------------------------===//

import Foundation

/// Text modality for OpenAI GPT OSS models served through Amazon Bedrock.
/// Builds InvokeModel request bodies and decodes their responses.
struct OpenAIText: TextModality, ConverseModality {
    let parameters: TextGenerationParameters
    let converseParameters: ConverseParameters
    let converseFeatures: [ConverseFeature]
    let maxReasoningTokens: Parameter<Int>

    func getName() -> String { "OpenAI Text Generation" }

    init(
        parameters: TextGenerationParameters,
        features: [ConverseFeature] = [.textGeneration, .systemPrompts, .document],
        maxReasoningTokens: Parameter<Int> = .notSupported(.maxReasoningTokens)
    ) {
        self.parameters = parameters
        self.converseFeatures = features
        self.converseParameters = ConverseParameters(
            textGenerationParameters: parameters,
            maxReasoningTokens: maxReasoningTokens
        )
        self.maxReasoningTokens = maxReasoningTokens
    }

    func getParameters() -> TextGenerationParameters {
        parameters
    }

    func getConverseParameters() -> ConverseParameters {
        ConverseParameters(textGenerationParameters: parameters, maxReasoningTokens: maxReasoningTokens)
    }

    /// Builds the InvokeModel request body.
    /// - Throws: `BedrockLibraryError.notFound` when no maxTokens value or default exists;
    ///   `BedrockLibraryError.notSupported` when both `topP` and `temperature` are set.
    /// - Note: `topK` and `stopSequences` are accepted for protocol conformance but are
    ///   not supported by these models and are ignored here.
    func getTextRequestBody(
        prompt: String,
        maxTokens: Int?,
        temperature: Double?,
        topP: Double?,
        topK: Int?,
        stopSequences: [String]?
    ) throws -> BedrockBodyCodable {
        guard let maxTokens = maxTokens ?? parameters.maxTokens.defaultValue else {
            throw BedrockLibraryError.notFound("No value was given for maxTokens and no default value was found")
        }
        if topP != nil && temperature != nil {
            throw BedrockLibraryError.notSupported("Alter either topP or temperature, but not both.")
        }
        // Resolve exactly ONE sampling parameter. Previously, when the caller set
        // neither, BOTH defaults were sent — contradicting the either/or rule
        // enforced above. Explicit caller choice wins; otherwise fall back to the
        // temperature default only.
        let resolvedTemperature: Double?
        let resolvedTopP: Double?
        if let temperature {
            resolvedTemperature = temperature
            resolvedTopP = nil
        } else if let topP {
            resolvedTemperature = nil
            resolvedTopP = topP
        } else {
            resolvedTemperature = parameters.temperature.defaultValue
            resolvedTopP = nil
        }
        return OpenAIRequestBody(
            prompt: prompt,
            maxTokens: maxTokens,
            temperature: resolvedTemperature,
            topP: resolvedTopP
        )
    }

    /// Decodes the raw InvokeModel response bytes into an OpenAI response body.
    func getTextResponseBody(from data: Data) throws -> ContainsTextCompletion {
        let decoder = JSONDecoder()
        return try decoder.decode(OpenAIResponseBody.self, from: data)
    }
}
51 changes: 51 additions & 0 deletions Sources/Models/OpenAI/OpenAIBedrockModels.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
//===----------------------------------------------------------------------===//
//
// This source file is part of the Swift Bedrock Library open source project
//
// Copyright (c) 2025 Amazon.com, Inc. or its affiliates
// and the Swift Bedrock Library project authors
// Licensed under Apache License v2.0
//
// See LICENSE.txt for license information
// See CONTRIBUTORS.txt for the list of Swift Bedrock Library project authors
//
// SPDX-License-Identifier: Apache-2.0
//
//===----------------------------------------------------------------------===//

import Foundation

// https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-openai.html

extension BedrockModel {
    /// Shared modality configuration for the GPT OSS family — both sizes expose
    /// identical inference parameters, so declare them once.
    // NOTE(review): limits (maxTokens 2_048, temperature/topP ranges) are taken
    // from the Bedrock OpenAI model-parameters page — confirm against current docs.
    private static func openAIGPTOSSModality() -> OpenAIText {
        OpenAIText(
            parameters: TextGenerationParameters(
                temperature: Parameter(.temperature, minValue: 0, maxValue: 1, defaultValue: 0.7),
                maxTokens: Parameter(.maxTokens, minValue: 0, maxValue: 2_048, defaultValue: 150),
                topP: Parameter(.topP, minValue: 0, maxValue: 1, defaultValue: 0.9),
                topK: Parameter.notSupported(.topK),
                stopSequences: StopSequenceParams.notSupported(),
                maxPromptSize: nil
            ),
            features: [.textGeneration, .systemPrompts, .document]
        )
    }

    /// OpenAI GPT OSS 20b on Amazon Bedrock.
    public static let openai_gpt_oss_20b: BedrockModel = BedrockModel(
        id: "openai.gpt-oss-20b-1:0",
        name: "OpenAI GPT OSS 20b",
        modality: openAIGPTOSSModality()
    )

    /// OpenAI GPT OSS 120b on Amazon Bedrock.
    public static let openai_gpt_oss_120b: BedrockModel = BedrockModel(
        id: "openai.gpt-oss-120b-1:0",
        name: "OpenAI GPT OSS 120b",
        modality: openAIGPTOSSModality()
    )
}
42 changes: 42 additions & 0 deletions Sources/Models/OpenAI/OpenAIRequestBody.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
//===----------------------------------------------------------------------===//
//
// This source file is part of the Swift Bedrock Library open source project
//
// Copyright (c) 2025 Amazon.com, Inc. or its affiliates
// and the Swift Bedrock Library project authors
// Licensed under Apache License v2.0
//
// See LICENSE.txt for license information
// See CONTRIBUTORS.txt for the list of Swift Bedrock Library project authors
//
// SPDX-License-Identifier: Apache-2.0
//
//===----------------------------------------------------------------------===//

import Foundation

/// JSON request body for OpenAI models invoked through Bedrock's InvokeModel API.
/// Encodes to the chat-completions shape: `max_completion_tokens`, optional
/// `temperature` / `top_p` (omitted when nil), and a single-entry `messages` array.
public struct OpenAIRequestBody: BedrockBodyCodable {
    private let maxCompletionTokens: Int
    private let temperature: Double?
    private let topP: Double?
    private let messages: [OpenAIMessage]

    // Map Swift-style property names onto the wire's snake_case keys; the
    // encoded JSON is identical to serializing snake_case stored properties.
    private enum CodingKeys: String, CodingKey {
        case maxCompletionTokens = "max_completion_tokens"
        case temperature
        case topP = "top_p"
        case messages
    }

    /// Wraps `prompt` in a single user message.
    public init(
        prompt: String,
        maxTokens: Int,
        temperature: Double?,
        topP: Double?
    ) {
        self.maxCompletionTokens = maxTokens
        self.temperature = temperature
        self.topP = topP
        self.messages = [OpenAIMessage(role: .user, content: prompt)]
    }

    // NOTE(review): `Role` is declared elsewhere in the library — confirm it
    // encodes to the string values ("user", …) the OpenAI schema expects.
    private struct OpenAIMessage: Codable {
        let role: Role
        let content: String
    }
}
Loading
Loading