Commit 3e7a3b8

Merge branch 'vertex-imagen' into ah/vertex-imagen-preview-2

# Conflicts:
#	FirebaseVertexAI/Sources/VertexAI.swift

2 parents: 6dd2c30 + bb7dab5

13 files changed: +61 −96 lines

FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenImageRepresentable.swift renamed to FirebaseVertexAI/Sources/Types/Internal/Imagen/ImagenImageRepresentable.swift

Lines changed: 7 additions & 2 deletions
@@ -14,7 +14,12 @@

 import Foundation

+// TODO(andrewheard): Make this public when the SDK supports Imagen operations that take images as
+// input (upscaling / editing).
 @available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
-public protocol ImagenImageRepresentable {
-  var _imagenImage: _ImagenImage { get }
+protocol ImagenImageRepresentable {
+  /// Internal representation of the image for use with the Imagen model.
+  ///
+  /// - Important: Not needed by SDK users.
+  var _internalImagenImage: _InternalImagenImage { get }
 }

FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenImage.swift renamed to FirebaseVertexAI/Sources/Types/Internal/Imagen/InternalImagenImage.swift

Lines changed: 8 additions & 1 deletion
@@ -14,8 +14,15 @@

 import Foundation

+/// Internal representation of an image for the Imagen model.
+///
+/// - Important: For internal use by types conforming to ``ImagenImageRepresentable``; all
+/// properties are `internal` and are not needed by SDK users.
+///
+/// TODO(andrewheard): Make this public when the SDK supports Imagen operations that take images as
+/// input (upscaling / editing).
 @available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
-public struct _ImagenImage {
+struct _InternalImagenImage {
   let mimeType: String
   let bytesBase64Encoded: String?
   let gcsURI: String?

FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenFileDataImage.swift

Lines changed: 4 additions & 2 deletions
@@ -27,8 +27,10 @@ public struct ImagenFileDataImage {

 @available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
 extension ImagenFileDataImage: ImagenImageRepresentable {
-  public var _imagenImage: _ImagenImage {
-    _ImagenImage(mimeType: mimeType, bytesBase64Encoded: nil, gcsURI: gcsURI)
+  // TODO(andrewheard): Make this public when the SDK supports Imagen operations that take images as
+  // input (upscaling / editing).
+  var _internalImagenImage: _InternalImagenImage {
+    _InternalImagenImage(mimeType: mimeType, bytesBase64Encoded: nil, gcsURI: gcsURI)
   }
 }


FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenGenerationConfig.swift

Lines changed: 6 additions & 1 deletion
@@ -16,12 +16,17 @@
 public struct ImagenGenerationConfig {
   public var numberOfImages: Int?
   public var negativePrompt: String?
+  public var imageFormat: ImagenImageFormat?
   public var aspectRatio: ImagenAspectRatio?
+  public var addWatermark: Bool?

   public init(numberOfImages: Int? = nil, negativePrompt: String? = nil,
-              aspectRatio: ImagenAspectRatio? = nil) {
+              imageFormat: ImagenImageFormat? = nil, aspectRatio: ImagenAspectRatio? = nil,
+              addWatermark: Bool? = nil) {
     self.numberOfImages = numberOfImages
     self.negativePrompt = negativePrompt
+    self.imageFormat = imageFormat
     self.aspectRatio = aspectRatio
+    self.addWatermark = addWatermark
   }
 }
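
For context, a minimal sketch of constructing the reworked config after this change; the option values are illustrative and not taken from the diff, while the parameter names and the .jpeg(compressionQuality:) / .landscape16x9 values mirror those used elsewhere in this commit.

import FirebaseVertexAI

// Illustrative values only; imageFormat and addWatermark now live on
// ImagenGenerationConfig instead of the removed ImagenModelConfig.
let generationConfig = ImagenGenerationConfig(
  numberOfImages: 2,
  negativePrompt: "blurry, low quality",
  imageFormat: .jpeg(compressionQuality: 70),
  aspectRatio: .landscape16x9,
  addWatermark: false
)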

FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenGenerationResponse.swift

Lines changed: 1 addition & 1 deletion
@@ -15,7 +15,7 @@
 import Foundation

 @available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
-public struct ImagenGenerationResponse<T: ImagenImageRepresentable> {
+public struct ImagenGenerationResponse<T> {
   public let images: [T]
   public let filteredReason: String?
 }

FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenInlineDataImage.swift

Lines changed: 4 additions & 2 deletions
@@ -31,8 +31,10 @@ public struct ImagenInlineDataImage {

 @available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
 extension ImagenInlineDataImage: ImagenImageRepresentable {
-  public var _imagenImage: _ImagenImage {
-    _ImagenImage(
+  // TODO(andrewheard): Make this public when the SDK supports Imagen operations that take images as
+  // input (upscaling / editing).
+  var _internalImagenImage: _InternalImagenImage {
+    _InternalImagenImage(
       mimeType: mimeType,
       bytesBase64Encoded: data.base64EncodedString(),
       gcsURI: nil

FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenModel.swift

Lines changed: 5 additions & 12 deletions
@@ -24,8 +24,6 @@ public final class ImagenModel {
   /// The backing service responsible for sending and receiving model requests to the backend.
   let generativeAIService: GenerativeAIService

-  let modelConfig: ImagenModelConfig?
-
   let safetySettings: ImagenSafetySettings?

   /// Configuration parameters for sending requests to the backend.
@@ -34,7 +32,6 @@ public final class ImagenModel {
   init(name: String,
        projectID: String,
        apiKey: String,
-       modelConfig: ImagenModelConfig?,
        safetySettings: ImagenSafetySettings?,
        requestOptions: RequestOptions,
        appCheck: AppCheckInterop?,
@@ -48,7 +45,6 @@ public final class ImagenModel {
       auth: auth,
       urlSession: urlSession
     )
-    self.modelConfig = modelConfig
     self.safetySettings = safetySettings
     self.requestOptions = requestOptions
   }
@@ -61,7 +57,6 @@ public final class ImagenModel {
       parameters: ImagenModel.imageGenerationParameters(
         storageURI: nil,
         generationConfig: generationConfig,
-        modelConfig: modelConfig,
         safetySettings: safetySettings
       )
     )
@@ -75,15 +70,14 @@ public final class ImagenModel {
       parameters: ImagenModel.imageGenerationParameters(
         storageURI: storageURI,
         generationConfig: generationConfig,
-        modelConfig: modelConfig,
         safetySettings: safetySettings
       )
     )
   }

-  func generateImages<T: Decodable>(prompt: String,
-                                    parameters: ImageGenerationParameters) async throws
-    -> ImagenGenerationResponse<T> {
+  func generateImages<T>(prompt: String,
+                         parameters: ImageGenerationParameters) async throws
+    -> ImagenGenerationResponse<T> where T: Decodable, T: ImagenImageRepresentable {
     let request = ImagenGenerationRequest<T>(
       model: modelResourceName,
       options: requestOptions,
@@ -96,7 +90,6 @@ public final class ImagenModel {

   static func imageGenerationParameters(storageURI: String?,
                                         generationConfig: ImagenGenerationConfig?,
-                                        modelConfig: ImagenModelConfig?,
                                         safetySettings: ImagenSafetySettings?)
     -> ImageGenerationParameters {
     return ImageGenerationParameters(
@@ -106,13 +99,13 @@ public final class ImagenModel {
       aspectRatio: generationConfig?.aspectRatio?.rawValue,
       safetyFilterLevel: safetySettings?.safetyFilterLevel?.rawValue,
       personGeneration: safetySettings?.personFilterLevel?.rawValue,
-      outputOptions: modelConfig?.imageFormat.map {
+      outputOptions: generationConfig?.imageFormat.map {
         ImageGenerationOutputOptions(
           mimeType: $0.mimeType,
           compressionQuality: $0.compressionQuality
         )
       },
-      addWatermark: modelConfig?.addWatermark,
+      addWatermark: generationConfig?.addWatermark,
       includeResponsibleAIFilterReason: true
     )
   }

FirebaseVertexAI/Sources/Types/Public/Imagen/ImagenModelConfig.swift

Lines changed: 0 additions & 24 deletions
This file was deleted.
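
For orientation, a hedged before/after sketch of the migration this deletion implies; the model name and format value come from the integration-test diff below, while `vertex` and `safetySettings` are assumed to already be in scope.

// Before (removed): output format and watermarking were fixed at model creation.
//
//   let model = vertex.imagenModel(
//     modelName: "imagen-3.0-fast-generate-001",
//     modelConfig: ImagenModelConfig(imageFormat: .jpeg(compressionQuality: 70)),
//     safetySettings: safetySettings
//   )

// After: the model is created without a modelConfig; the same options are
// supplied per request through ImagenGenerationConfig instead.
let model = vertex.imagenModel(
  modelName: "imagen-3.0-fast-generate-001",
  safetySettings: safetySettings
)
let generationConfig = ImagenGenerationConfig(imageFormat: .jpeg(compressionQuality: 70))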

FirebaseVertexAI/Sources/VertexAI.swift

Lines changed: 0 additions & 1 deletion
@@ -113,7 +113,6 @@ public class VertexAI {
       name: modelResourceName(modelName: modelName),
       projectID: projectID,
       apiKey: apiKey,
-      modelConfig: modelConfig,
       safetySettings: safetySettings,
       requestOptions: requestOptions,
       appCheck: appCheck,

FirebaseVertexAI/Tests/TestApp/Tests/Integration/IntegrationTests.swift

Lines changed: 4 additions & 2 deletions
@@ -64,7 +64,6 @@ final class IntegrationTests: XCTestCase {
     )
     imagenModel = vertex.imagenModel(
       modelName: "imagen-3.0-fast-generate-001",
-      modelConfig: ImagenModelConfig(imageFormat: .jpeg(compressionQuality: 70)),
       safetySettings: ImagenSafetySettings(
         safetyFilterLevel: .blockLowAndAbove,
         personFilterLevel: .blockAll
@@ -255,7 +254,10 @@ final class IntegrationTests: XCTestCase {
     overlooking a vast African savanna at sunset. Golden hour light, long shadows, sharp focus on
     the lion, shallow depth of field, detailed fur texture, DSLR, 85mm lens.
     """
-    let generationConfig = ImagenGenerationConfig(aspectRatio: .landscape16x9)
+    let generationConfig = ImagenGenerationConfig(
+      imageFormat: .jpeg(compressionQuality: 70),
+      aspectRatio: .landscape16x9
+    )

     let response = try await imagenModel.generateImages(
       prompt: imagePrompt,
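
Tying the pieces together, a hedged end-to-end sketch. The test's generateImages call above is truncated, so the `generationConfig:` argument label, the `VertexAI.vertexAI()` factory, and the explicit response type annotation are assumptions; the types, model name, and option values come from this commit's diffs.

import FirebaseVertexAI

func generateSampleImages() async throws {
  // Assumed factory; only the types and option values below come from the diff.
  let vertex = VertexAI.vertexAI()
  let imagenModel = vertex.imagenModel(
    modelName: "imagen-3.0-fast-generate-001",
    safetySettings: ImagenSafetySettings(
      safetyFilterLevel: .blockLowAndAbove,
      personFilterLevel: .blockAll
    )
  )
  let generationConfig = ImagenGenerationConfig(
    imageFormat: .jpeg(compressionQuality: 70),
    aspectRatio: .landscape16x9
  )
  // The generationConfig: label is an assumption based on the truncated test call.
  let response: ImagenGenerationResponse<ImagenInlineDataImage> = try await imagenModel.generateImages(
    prompt: "A photorealistic lion overlooking a savanna at sunset",
    generationConfig: generationConfig
  )
  print("Generated \(response.images.count) image(s); filteredReason: \(response.filteredReason ?? "none")")
}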
