
Commit ee3fde0

Add ImagenModel
1 parent c7bd464 commit ee3fde0

3 files changed: +90 -1 lines changed


FirebaseVertexAI/Sources/GenerativeAIService.swift

Lines changed: 0 additions & 1 deletion
@@ -203,7 +203,6 @@ struct GenerativeAIService {
    }

    let encoder = JSONEncoder()
-   encoder.keyEncodingStrategy = .convertToSnakeCase
    urlRequest.httpBody = try encoder.encode(request)
    urlRequest.timeoutInterval = request.options.timeout

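For context on the one-line removal above, here is a standalone sketch (not part of this commit; the ExampleParameters type is invented for illustration) of what the dropped keyEncodingStrategy did: with .convertToSnakeCase, Swift property names such as sampleCount were rewritten to sample_count in the request JSON, while the default strategy sends them as declared. Removing the global setting presumably leaves key spelling to each request type's own Encodable conformance.

import Foundation

// Illustrative payload type only; not the SDK's actual request type.
struct ExampleParameters: Encodable {
  let sampleCount: Int
}

let params = ExampleParameters(sampleCount: 1)

// Default key strategy keeps the Swift property name: {"sampleCount":1}
let defaultEncoder = JSONEncoder()
print(String(data: try! defaultEncoder.encode(params), encoding: .utf8)!)

// The removed strategy rewrote keys to snake_case: {"sample_count":1}
let snakeCaseEncoder = JSONEncoder()
snakeCaseEncoder.keyEncodingStrategy = .convertToSnakeCase
print(String(data: try! snakeCaseEncoder.encode(params), encoding: .utf8)!)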

ImagenModel.swift (new file)

Lines changed: 78 additions & 0 deletions
@@ -0,0 +1,78 @@
import FirebaseAppCheckInterop
import FirebaseAuthInterop
import Foundation

@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
public final class ImagenModel {
  /// The resource name of the model in the backend; has the format "models/model-name".
  let modelResourceName: String

  /// The backing service responsible for sending and receiving model requests to the backend.
  let generativeAIService: GenerativeAIService

  /// Configuration parameters for sending requests to the backend.
  let requestOptions: RequestOptions

  init(name: String,
       projectID: String,
       apiKey: String,
       requestOptions: RequestOptions,
       appCheck: AppCheckInterop?,
       auth: AuthInterop?,
       urlSession: URLSession = .shared) {
    modelResourceName = name
    generativeAIService = GenerativeAIService(
      projectID: projectID,
      apiKey: apiKey,
      appCheck: appCheck,
      auth: auth,
      urlSession: urlSession
    )
    self.requestOptions = requestOptions
  }

  public func generateImages(prompt: String) async throws
    -> ImageGenerationResponse<ImagenInlineDataImage> {
    return try await generateImages(
      prompt: prompt,
      parameters: imageGenerationParameters(storageURI: nil)
    )
  }

  public func generateImages(prompt: String, storageURI: String) async throws
    -> ImageGenerationResponse<ImagenFileDataImage> {
    return try await generateImages(
      prompt: prompt,
      parameters: imageGenerationParameters(storageURI: storageURI)
    )
  }

  func generateImages<T: Decodable>(prompt: String,
                                    parameters: ImageGenerationParameters) async throws
    -> ImageGenerationResponse<T> {
    let request = ImageGenerationRequest<T>(
      model: modelResourceName,
      options: requestOptions,
      instances: [ImageGenerationInstance(prompt: prompt)],
      parameters: parameters
    )

    return try await generativeAIService.loadRequest(request: request)
  }

  func imageGenerationParameters(storageURI: String?) -> ImageGenerationParameters {
    // TODO(#14221): Add support for configuring these parameters.
    return ImageGenerationParameters(
      sampleCount: 1,
      storageURI: storageURI,
      seed: nil,
      negativePrompt: nil,
      aspectRatio: nil,
      safetyFilterLevel: nil,
      personGeneration: nil,
      outputOptions: nil,
      addWatermark: nil,
      includeResponsibleAIFilterReason: true
    )
  }
}
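A hedged usage sketch for the two public overloads above (not part of the diff; the call site, prompt text, and bucket URI are placeholders): omitting storageURI yields ImagenInlineDataImage results, while supplying one yields ImagenFileDataImage results, presumably references to objects written under the given Cloud Storage URI.

// Hypothetical call site; `model` is assumed to be an ImagenModel obtained from
// VertexAI (see the next file). Prompt and storage URI values are placeholders.
@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
func generateExamples(model: ImagenModel) async throws {
  // Overload without a storage URI: image data comes back inline in the response.
  let inline: ImageGenerationResponse<ImagenInlineDataImage> =
    try await model.generateImages(prompt: "a watercolor painting of a lighthouse")

  // Overload with a storage URI: the response carries file references instead of
  // inline data, presumably pointing at objects under the given gs:// location.
  let stored: ImageGenerationResponse<ImagenFileDataImage> =
    try await model.generateImages(
      prompt: "a watercolor painting of a lighthouse",
      storageURI: "gs://example-bucket/imagen-output"
    )
  _ = (inline, stored)
}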

FirebaseVertexAI/Sources/VertexAI.swift

Lines changed: 12 additions & 0 deletions
@@ -104,6 +104,18 @@ public class VertexAI {
    )
  }

+  public func imagenModel(modelName: String, requestOptions: RequestOptions = RequestOptions())
+    -> ImagenModel {
+    return ImagenModel(
+      name: modelResourceName(modelName: modelName),
+      projectID: projectID,
+      apiKey: apiKey,
+      requestOptions: requestOptions,
+      appCheck: appCheck,
+      auth: auth
+    )
+  }
+
  /// Class to enable VertexAI to register via the Objective-C based Firebase component system
  /// to include VertexAI in the userAgent.
  @objc(FIRVertexAIComponent) class FirebaseVertexAIComponent: NSObject {}
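A hedged sketch of how an app might obtain the new model through this factory method (not part of the diff; it assumes the existing VertexAI.vertexAI() entry point and an already-configured FirebaseApp, and the model name is illustrative, not taken from this commit):

import FirebaseVertexAI

// Hypothetical call site. Assumes FirebaseApp.configure() has already run.
@available(iOS 15.0, macOS 12.0, macCatalyst 15.0, tvOS 15.0, watchOS 8.0, *)
func makeImagenModel() -> ImagenModel {
  let vertexAI = VertexAI.vertexAI()
  // requestOptions can be omitted because the new API gives it a default value.
  return vertexAI.imagenModel(modelName: "imagen-3.0-generate-001")
}

Because requestOptions defaults to RequestOptions(), callers only need to pass a model name; App Check and Auth interop are wired in by VertexAI itself, matching the ImagenModel initializer added in this commit.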
