diff --git a/.github/workflows/firebaseai.yml b/.github/workflows/firebaseai.yml index ba2cfa86c..87f7eab85 100644 --- a/.github/workflows/firebaseai.yml +++ b/.github/workflows/firebaseai.yml @@ -23,7 +23,7 @@ jobs: runs-on: macOS-15 strategy: matrix: - xcode: ["16.3"] + xcode: ["26_beta_5"] os: [iOS] include: - os: iOS diff --git a/firebaseai/ChatExample/Models/ChatMessage.swift b/firebaseai/ChatExample/Models/ChatMessage.swift index 33725083f..f1171c748 100644 --- a/firebaseai/ChatExample/Models/ChatMessage.swift +++ b/firebaseai/ChatExample/Models/ChatMessage.swift @@ -14,35 +14,65 @@ import FirebaseAI import Foundation +import ConversationKit -enum Participant { - case system - case user -} +public struct ChatMessage: Message { + public let id: UUID = .init() + public var content: String? + public let imageURL: String? + public let participant: Participant + public let error: (any Error)? + public var pending = false + public var groundingMetadata: GroundingMetadata? -struct ChatMessage: Identifiable, Equatable { - let id = UUID().uuidString - var message: String - var groundingMetadata: GroundingMetadata? - let participant: Participant - var pending = false + public init(content: String? = nil, imageURL: String? = nil, participant: Participant, + error: (any Error)? = nil, pending: Bool = false) { + self.content = content + self.imageURL = imageURL + self.participant = participant + self.error = error + self.pending = pending + } - static func pending(participant: Participant) -> ChatMessage { - Self(message: "", participant: participant, pending: true) + // Protocol-required initializer + public init(content: String?, imageURL: String?, participant: Participant) { + self.content = content + self.imageURL = imageURL + self.participant = participant + error = nil } +} - // TODO(andrewheard): Add Equatable conformance to GroundingMetadata and remove this - static func == (lhs: ChatMessage, rhs: ChatMessage) -> Bool { - lhs.id == rhs.id && lhs.message == rhs.message && lhs.participant == rhs.participant && lhs - .pending == rhs.pending +extension ChatMessage { + public static func pending(participant: Participant) -> ChatMessage { + Self(content: "", participant: participant, pending: true) } } +// Implement Equatable and Hashable for ChatMessage (ignore error) extension ChatMessage { + public static func == (lhs: ChatMessage, rhs: ChatMessage) -> Bool { + lhs.id == rhs.id && + lhs.content == rhs.content && + lhs.imageURL == rhs.imageURL && + lhs.participant == rhs.participant + // intentionally ignore `error` + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(id) + hasher.combine(content) + hasher.combine(imageURL) + hasher.combine(participant) + // intentionally ignore `error` + } +} + +public extension ChatMessage { static var samples: [ChatMessage] = [ - .init(message: "Hello. What can I do for you today?", participant: .system), - .init(message: "Show me a simple loop in Swift.", participant: .user), - .init(message: """ + .init(content: "Hello. What can I do for you today?", participant: .other), + .init(content: "Show me a simple loop in Swift.", participant: .user), + .init(content: """ Sure, here is a simple loop in Swift: # Example 1 @@ -65,13 +95,13 @@ extension ChatMessage { ``` This loop calculates the sum of the numbers from 1 to 100. The variable sum is initialized to 0, and then the for loop iterates over the range of numbers from 1 to 100. The variable i is assigned each number in the range, and the value of i is added to the sum variable. 
After the loop has finished executing, the value of sum is printed to the console. - """, participant: .system), + """, participant: .other), ] static var sample = samples[0] } -extension ChatMessage { +public extension ChatMessage { static func from(_ modelContent: ModelContent) -> ChatMessage? { // TODO: add non-text parts to message when multi-model support is added let text = modelContent.parts.compactMap { ($0 as? TextPart)?.text }.joined() @@ -79,9 +109,9 @@ extension ChatMessage { return nil } - let participant: Participant = (modelContent.role == "user") ? .user : .system + let participant: Participant = (modelContent.role == "user") ? .user : .other - return ChatMessage(message: text, participant: participant) + return ChatMessage(content: text, participant: participant) } static func from(_ modelContents: [ModelContent]) -> [ChatMessage] { diff --git a/firebaseai/ChatExample/Screens/ChatScreen.swift b/firebaseai/ChatExample/Screens/ChatScreen.swift new file mode 100644 index 000000000..880a97ce4 --- /dev/null +++ b/firebaseai/ChatExample/Screens/ChatScreen.swift @@ -0,0 +1,69 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import FirebaseAI +import SwiftUI +import ConversationKit + +struct ChatScreen: View { + let firebaseService: FirebaseAI + @StateObject var viewModel: ChatViewModel + + init(firebaseService: FirebaseAI, sample: Sample? = nil) { + self.firebaseService = firebaseService + _viewModel = + StateObject(wrappedValue: ChatViewModel(firebaseService: firebaseService, + sample: sample)) + } + + var body: some View { + NavigationStack { + ConversationView(messages: $viewModel.messages, + userPrompt: viewModel.initialPrompt) { message in + MessageView(message: message) + } + .disableAttachments() + .onSendMessage { message in + Task { + await viewModel.sendMessage(message.content ?? "", streaming: true) + } + } + .onError { error in + viewModel.presentErrorDetails = true + } + .sheet(isPresented: $viewModel.presentErrorDetails) { + if let error = viewModel.error { + ErrorDetailsView(error: error) + } + } + .toolbar { + ToolbarItem(placement: .primaryAction) { + Button(action: newChat) { + Image(systemName: "square.and.pencil") + } + } + } + .navigationTitle(viewModel.title) + .navigationBarTitleDisplayMode(.inline) + } + } + + private func newChat() { + viewModel.startNewChat() + } +} + +#Preview { + ChatScreen(firebaseService: FirebaseAI.firebaseAI()) +} diff --git a/firebaseai/ChatExample/Screens/ConversationScreen.swift b/firebaseai/ChatExample/Screens/ConversationScreen.swift deleted file mode 100644 index 6c6f7eac6..000000000 --- a/firebaseai/ChatExample/Screens/ConversationScreen.swift +++ /dev/null @@ -1,145 +0,0 @@ -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import FirebaseAI -import GenerativeAIUIComponents -import SwiftUI - -struct ConversationScreen: View { - let firebaseService: FirebaseAI - @StateObject var viewModel: ConversationViewModel - - @State - private var userPrompt = "" - - init(firebaseService: FirebaseAI, sample: Sample? = nil) { - self.firebaseService = firebaseService - _viewModel = - StateObject(wrappedValue: ConversationViewModel(firebaseService: firebaseService, - sample: sample)) - } - - enum FocusedField: Hashable { - case message - } - - @FocusState - var focusedField: FocusedField? - - var body: some View { - VStack { - ScrollViewReader { scrollViewProxy in - List { - ForEach(viewModel.messages) { message in - MessageView(message: message) - } - if let error = viewModel.error { - ErrorView(error: error) - .tag("errorView") - } - } - .listStyle(.plain) - .onChange(of: viewModel.messages, perform: { newValue in - if viewModel.hasError { - // wait for a short moment to make sure we can actually scroll to the bottom - DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) { - withAnimation { - scrollViewProxy.scrollTo("errorView", anchor: .bottom) - } - focusedField = .message - } - } else { - guard let lastMessage = viewModel.messages.last else { return } - - // wait for a short moment to make sure we can actually scroll to the bottom - DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) { - withAnimation { - scrollViewProxy.scrollTo(lastMessage.id, anchor: .bottom) - } - focusedField = .message - } - } - }) - } - InputField("Message...", text: $userPrompt) { - Image(systemName: viewModel.busy ? 
"stop.circle.fill" : "arrow.up.circle.fill") - .font(.title) - } - .focused($focusedField, equals: .message) - .onSubmit { sendOrStop() } - } - .onTapGesture { - focusedField = nil - } - .toolbar { - ToolbarItem(placement: .primaryAction) { - Button(action: newChat) { - Image(systemName: "square.and.pencil") - } - } - } - .navigationTitle(viewModel.title) - .navigationBarTitleDisplayMode(.inline) - .onAppear { - focusedField = .message - // Set initial prompt from viewModel if available - if userPrompt.isEmpty && !viewModel.initialPrompt.isEmpty { - userPrompt = viewModel.initialPrompt - } - } - } - - private func sendMessage() { - Task { - let prompt = userPrompt - userPrompt = "" - await viewModel.sendMessage(prompt, streaming: true) - } - } - - private func sendOrStop() { - focusedField = nil - - if viewModel.busy { - viewModel.stop() - } else { - sendMessage() - } - } - - private func newChat() { - viewModel.startNewChat() - userPrompt = "" - } -} - -struct ConversationScreen_Previews: PreviewProvider { - struct ContainerView: View { - @StateObject var viewModel = ConversationViewModel(firebaseService: FirebaseAI - .firebaseAI(), sample: nil) // Example service init - - var body: some View { - ConversationScreen(firebaseService: FirebaseAI.firebaseAI()) - .onAppear { - viewModel.messages = ChatMessage.samples - } - } - } - - static var previews: some View { - NavigationStack { - ConversationScreen(firebaseService: FirebaseAI.firebaseAI()) - } - } -} diff --git a/firebaseai/ChatExample/ViewModels/ConversationViewModel.swift b/firebaseai/ChatExample/ViewModels/ChatViewModel.swift similarity index 78% rename from firebaseai/ChatExample/ViewModels/ConversationViewModel.swift rename to firebaseai/ChatExample/ViewModels/ChatViewModel.swift index 676534a13..ad1077407 100644 --- a/firebaseai/ChatExample/ViewModels/ConversationViewModel.swift +++ b/firebaseai/ChatExample/ViewModels/ChatViewModel.swift @@ -15,10 +15,9 @@ import FirebaseAI import Foundation import UIKit -import GenerativeAIUIComponents @MainActor -class ConversationViewModel: ObservableObject { +class ChatViewModel: ObservableObject { /// This array holds both the user's and the system's chat messages @Published var messages = [ChatMessage]() @@ -30,6 +29,8 @@ class ConversationViewModel: ObservableObject { return error != nil } + @Published var presentErrorDetails: Bool = false + @Published var initialPrompt: String = "" @Published var title: String = "" @@ -95,11 +96,11 @@ class ConversationViewModel: ObservableObject { } // first, add the user's message to the chat - let userMessage = ChatMessage(message: text, participant: .user) + let userMessage = ChatMessage(content: text, participant: .user) messages.append(userMessage) // add a pending message while we're waiting for a response from the backend - let systemMessage = ChatMessage.pending(participant: .system) + let systemMessage = ChatMessage.pending(participant: .other) messages.append(systemMessage) do { @@ -107,7 +108,8 @@ class ConversationViewModel: ObservableObject { for try await chunk in responseStream { messages[messages.count - 1].pending = false if let text = chunk.text { - messages[messages.count - 1].message += text + messages[messages.count - 1] + .content = (messages[messages.count - 1].content ?? 
"") + text } if let candidate = chunk.candidates.first { @@ -120,7 +122,11 @@ class ConversationViewModel: ObservableObject { } catch { self.error = error print(error.localizedDescription) - messages.removeLast() + let errorMessage = ChatMessage(content: "An error occurred. Please try again.", + participant: .other, + error: error, + pending: false) + messages[messages.count - 1] = errorMessage } } } @@ -135,11 +141,11 @@ class ConversationViewModel: ObservableObject { } // first, add the user's message to the chat - let userMessage = ChatMessage(message: text, participant: .user) + let userMessage = ChatMessage(content: text, participant: .user) messages.append(userMessage) // add a pending message while we're waiting for a response from the backend - let systemMessage = ChatMessage.pending(participant: .system) + let systemMessage = ChatMessage.pending(participant: .other) messages.append(systemMessage) do { @@ -148,7 +154,7 @@ class ConversationViewModel: ObservableObject { if let responseText = response?.text { // replace pending message with backend response - messages[messages.count - 1].message = responseText + messages[messages.count - 1].content = responseText messages[messages.count - 1].pending = false if let candidate = response?.candidates.first { @@ -160,7 +166,11 @@ class ConversationViewModel: ObservableObject { } catch { self.error = error print(error.localizedDescription) - messages.removeLast() + let errorMessage = ChatMessage(content: "An error occurred. Please try again.", + participant: .other, + error: error, + pending: false) + messages[messages.count - 1] = errorMessage } } } diff --git a/firebaseai/ChatExample/Views/ErrorView.swift b/firebaseai/ChatExample/Views/ErrorView.swift deleted file mode 100644 index 55ea40369..000000000 --- a/firebaseai/ChatExample/Views/ErrorView.swift +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import FirebaseAI -import SwiftUI - -struct ErrorView: View { - var error: Error - @State private var isDetailsSheetPresented = false - var body: some View { - HStack { - Text("An error occurred.") - Button(action: { isDetailsSheetPresented.toggle() }) { - Image(systemName: "info.circle") - } - } - .frame(maxWidth: .infinity, alignment: .center) - .listRowSeparator(.hidden) - .sheet(isPresented: $isDetailsSheetPresented) { - ErrorDetailsView(error: error) - } - } -} - -#Preview { - NavigationView { - let errorPromptBlocked = GenerateContentError.promptBlocked( - response: GenerateContentResponse( - candidates: [ - Candidate( - content: ModelContent(role: "model", parts: [ - """ - A _hypothetical_ model response. - Cillum ex aliqua amet aliquip labore amet eiusmod consectetur reprehenderit sit commodo. 
- """, - ]), - safetyRatings: [ - SafetyRating( - category: .dangerousContent, - probability: .high, - probabilityScore: 0.8, - severity: .medium, - severityScore: 0.9, - blocked: true - ), - SafetyRating( - category: .harassment, - probability: .low, - probabilityScore: 0.5, - severity: .low, - severityScore: 0.6, - blocked: false - ), - SafetyRating( - category: .hateSpeech, - probability: .low, - probabilityScore: 0.3, - severity: .medium, - severityScore: 0.2, - blocked: false - ), - SafetyRating( - category: .sexuallyExplicit, - probability: .low, - probabilityScore: 0.2, - severity: .negligible, - severityScore: 0.5, - blocked: false - ), - ], - finishReason: FinishReason.other, - citationMetadata: nil - ), - ] - ) - ) - List { - MessageView(message: ChatMessage.samples[0]) - MessageView(message: ChatMessage.samples[1]) - ErrorView(error: errorPromptBlocked) - } - .listStyle(.plain) - .navigationTitle("Chat example") - } -} diff --git a/firebaseai/ChatExample/Views/MessageView.swift b/firebaseai/ChatExample/Views/MessageView.swift index b11c903e5..2242a02cd 100644 --- a/firebaseai/ChatExample/Views/MessageView.swift +++ b/firebaseai/ChatExample/Views/MessageView.swift @@ -37,17 +37,31 @@ extension View { } struct MessageContentView: View { + @Environment(\.presentErrorAction) var presentErrorAction var message: ChatMessage var body: some View { if message.pending { BouncingDots() } else { + // Error Message + if let error = message.error { + HStack { + Text("An error occurred.") + Button("More information", systemImage: "info.circle") { + presentErrorAction?(error) + } + .labelStyle(.iconOnly) + } + } + // Grounded Response - if let groundingMetadata = message.groundingMetadata { + else if let groundingMetadata = message.groundingMetadata { GroundedResponseView(message: message, groundingMetadata: groundingMetadata) - } else { - // Non-grounded response + } + + // Non-grounded response + else { ResponseTextView(message: message) } } @@ -58,11 +72,11 @@ struct ResponseTextView: View { var message: ChatMessage var body: some View { - Markdown(message.message) + Markdown(message.content ?? "") .markdownTextStyle { FontFamilyVariant(.normal) FontSize(.em(0.85)) - ForegroundColor(message.participant == .system ? Color(UIColor.label) : .white) + ForegroundColor(message.participant == .other ? Color(UIColor.label) : .white) } .markdownBlockStyle(\.codeBlock) { configuration in configuration.label @@ -83,24 +97,41 @@ struct ResponseTextView: View { struct MessageView: View { var message: ChatMessage + private var participantLabel: String { + message.participant == .user ? "User" : "Model" + } + var body: some View { - HStack { - if message.participant == .user { - Spacer() - } - MessageContentView(message: message) - .padding(10) - .background(message.participant == .system - ? Color(UIColor.systemFill) - : Color(UIColor.systemBlue)) - .roundedCorner(10, - corners: [ - .topLeft, - .topRight, - message.participant == .system ? .bottomRight : .bottomLeft, - ]) - if message.participant == .system { - Spacer() + VStack(alignment: message.participant == .user ? .trailing : .leading, spacing: 4) { + // Sender label + Text(participantLabel) + .font(.caption2) + .fontWeight(.medium) + .foregroundColor(.secondary) + .textCase(.uppercase) + .padding(.horizontal, 8) + .padding(.vertical, 2) + .frame(maxWidth: .infinity, alignment: message.participant == .user ? 
.trailing : .leading) + + // Message content + HStack { + if message.participant == .user { + Spacer() + } + MessageContentView(message: message) + .padding(10) + .background(message.participant == .other + ? Color(UIColor.systemFill) + : Color(UIColor.systemBlue)) + .roundedCorner(10, + corners: [ + .topLeft, + .topRight, + message.participant == .other ? .bottomRight : .bottomLeft, + ]) + if message.participant == .other { + Spacer() + } } } .listRowSeparator(.hidden) @@ -114,7 +145,7 @@ struct MessageView_Previews: PreviewProvider { MessageView(message: ChatMessage.samples[0]) MessageView(message: ChatMessage.samples[1]) MessageView(message: ChatMessage.samples[2]) - MessageView(message: ChatMessage(message: "Hello!", participant: .system, pending: true)) + MessageView(message: ChatMessage(content: "Hello!", participant: .other, pending: true)) } .listStyle(.plain) .navigationTitle("Chat example") diff --git a/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj b/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj index 443e12082..2217a26aa 100644 --- a/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj +++ b/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj @@ -3,30 +3,33 @@ archiveVersion = 1; classes = { }; - objectVersion = 60; + objectVersion = 56; objects = { /* Begin PBXBuildFile section */ - 7200F3082E3A054300CDC51C /* GenerativeAIUIComponents in Frameworks */ = {isa = PBXBuildFile; productRef = 7200F3072E3A054300CDC51C /* GenerativeAIUIComponents */; }; + 726490D92E3F39E000A92700 /* Sample.swift in Sources */ = {isa = PBXBuildFile; fileRef = 726490D22E3F39D200A92700 /* Sample.swift */; }; + 726490DA2E3F39E000A92701 /* UseCase.swift in Sources */ = {isa = PBXBuildFile; fileRef = 726490D32E3F39D200A92700 /* UseCase.swift */; }; + 726490DC2E3F39E000A92703 /* InputField.swift in Sources */ = {isa = PBXBuildFile; fileRef = 726490D72E3F39D900A92700 /* InputField.swift */; }; + 726490DD2E3F39E000A92704 /* MultimodalInputField.swift in Sources */ = {isa = PBXBuildFile; fileRef = 726490D82E3F39DC00A92700 /* MultimodalInputField.swift */; }; 72DA044F2E385DF3004FED7D /* ChatMessage.swift in Sources */ = {isa = PBXBuildFile; fileRef = 72DA044E2E385DF3004FED7D /* ChatMessage.swift */; }; + 72E040752E448731003D4135 /* WeatherService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 72E040742E44872C003D4135 /* WeatherService.swift */; }; 869200B32B879C4F00482873 /* GoogleService-Info.plist in Resources */ = {isa = PBXBuildFile; fileRef = 869200B22B879C4F00482873 /* GoogleService-Info.plist */; }; 86C1F4832BC726150026816F /* FunctionCallingScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 86C1F47E2BC726150026816F /* FunctionCallingScreen.swift */; }; 86C1F4842BC726150026816F /* FunctionCallingViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 86C1F4802BC726150026816F /* FunctionCallingViewModel.swift */; }; - 88263BF02B239C09008AB09B /* ErrorView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88263BEE2B239BFE008AB09B /* ErrorView.swift */; }; 88263BF12B239C11008AB09B /* ErrorDetailsView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 889873842B208563005B4896 /* ErrorDetailsView.swift */; }; + 884298E12E4B8110005F535F /* ConversationKit in Frameworks */ = {isa = PBXBuildFile; productRef = 884298E02E4B8110005F535F /* ConversationKit */; }; 8848C8332B0D04BC007B434F /* FirebaseAIExampleApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8848C8322B0D04BC007B434F /* FirebaseAIExampleApp.swift */; }; 8848C8352B0D04BC007B434F /* ContentView.swift 
in Sources */ = {isa = PBXBuildFile; fileRef = 8848C8342B0D04BC007B434F /* ContentView.swift */; }; 8848C8372B0D04BD007B434F /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8848C8362B0D04BD007B434F /* Assets.xcassets */; }; 8848C83A2B0D04BD007B434F /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8848C8392B0D04BD007B434F /* Preview Assets.xcassets */; }; - 886F95D52B17BA010036F07A /* GenerateContentScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88209C1B2B0FBDC300F64795 /* GenerateContentScreen.swift */; }; - 886F95D62B17BA010036F07A /* GenerateContentViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88209C1D2B0FBDC300F64795 /* GenerateContentViewModel.swift */; }; + 885D0CA12E4CB7CD00A217A0 /* ConversationKit in Frameworks */ = {isa = PBXBuildFile; productRef = 885D0CA02E4CB7CD00A217A0 /* ConversationKit */; }; 886F95D82B17BA420036F07A /* MarkdownUI in Frameworks */ = {isa = PBXBuildFile; productRef = 886F95D72B17BA420036F07A /* MarkdownUI */; }; 886F95DB2B17BAEF0036F07A /* PhotoReasoningViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8802666F2B0FC39000CF7CB6 /* PhotoReasoningViewModel.swift */; }; 886F95DC2B17BAEF0036F07A /* PhotoReasoningScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 880266752B0FC39000CF7CB6 /* PhotoReasoningScreen.swift */; }; 886F95DD2B17D5010036F07A /* MessageView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F5A2B11133E00C08E95 /* MessageView.swift */; }; 886F95DF2B17D5010036F07A /* BouncingDots.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F5C2B11135000C08E95 /* BouncingDots.swift */; }; - 886F95E02B17D5010036F07A /* ConversationViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F562B1112F600C08E95 /* ConversationViewModel.swift */; }; - 886F95E12B17D5010036F07A /* ConversationScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F542B1112CA00C08E95 /* ConversationScreen.swift */; }; + 886F95E02B17D5010036F07A /* ChatViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F562B1112F600C08E95 /* ChatViewModel.swift */; }; + 886F95E12B17D5010036F07A /* ChatScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F542B1112CA00C08E95 /* ChatScreen.swift */; }; A5E8E3C92C3B4F388A7A4A19 /* FilterChipView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A5E8E3C52C3B4F388A7A4A15 /* FilterChipView.swift */; }; A5E8E3CA2C3B4F388A7A4A1A /* SampleCardView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A5E8E3C62C3B4F388A7A4A16 /* SampleCardView.swift */; }; AEE793DF2E256D3900708F02 /* GoogleSearchSuggestionView.swift in Sources */ = {isa = PBXBuildFile; fileRef = AEE793DC2E256D3900708F02 /* GoogleSearchSuggestionView.swift */; }; @@ -37,31 +40,29 @@ /* End PBXBuildFile section */ /* Begin PBXFileReference section */ - 726634072E37011C00554974 /* Package.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; name = Package.swift; path = GenerativeAIUIComponents/Package.swift; sourceTree = ""; }; + 726490D22E3F39D200A92700 /* Sample.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Sample.swift; sourceTree = ""; }; + 726490D32E3F39D200A92700 /* UseCase.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UseCase.swift; sourceTree = ""; }; + 726490D72E3F39D900A92700 /* InputField.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InputField.swift; sourceTree = ""; }; + 726490D82E3F39DC00A92700 /* 
MultimodalInputField.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MultimodalInputField.swift; sourceTree = ""; }; 72DA044E2E385DF3004FED7D /* ChatMessage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatMessage.swift; sourceTree = ""; }; + 72E040742E44872C003D4135 /* WeatherService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WeatherService.swift; sourceTree = ""; }; 869200B22B879C4F00482873 /* GoogleService-Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = "GoogleService-Info.plist"; sourceTree = ""; }; 86C1F47E2BC726150026816F /* FunctionCallingScreen.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FunctionCallingScreen.swift; sourceTree = ""; }; 86C1F4802BC726150026816F /* FunctionCallingViewModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FunctionCallingViewModel.swift; sourceTree = ""; }; 8802666F2B0FC39000CF7CB6 /* PhotoReasoningViewModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoReasoningViewModel.swift; sourceTree = ""; }; 880266752B0FC39000CF7CB6 /* PhotoReasoningScreen.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoReasoningScreen.swift; sourceTree = ""; }; - 88209C1B2B0FBDC300F64795 /* GenerateContentScreen.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GenerateContentScreen.swift; sourceTree = ""; }; - 88209C1D2B0FBDC300F64795 /* GenerateContentViewModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GenerateContentViewModel.swift; sourceTree = ""; }; - 88263BEE2B239BFE008AB09B /* ErrorView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ErrorView.swift; sourceTree = ""; }; 8848C82F2B0D04BC007B434F /* FirebaseAIExample.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = FirebaseAIExample.app; sourceTree = BUILT_PRODUCTS_DIR; }; 8848C8322B0D04BC007B434F /* FirebaseAIExampleApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FirebaseAIExampleApp.swift; sourceTree = ""; }; 8848C8342B0D04BC007B434F /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; 8848C8362B0D04BD007B434F /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 8848C8392B0D04BD007B434F /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; - 8848C84A2B0D051F007B434F /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; - 8848C84D2B0D051F007B434F /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; 8848C85C2B0D056D007B434F /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 8848C85F2B0D056D007B434F /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; 
path = "Preview Assets.xcassets"; sourceTree = ""; }; 889873842B208563005B4896 /* ErrorDetailsView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ErrorDetailsView.swift; sourceTree = ""; }; - 88B8A9352B0FCBA700424728 /* GenerativeAIUIComponents */ = {isa = PBXFileReference; lastKnownFileType = wrapper; path = GenerativeAIUIComponents; sourceTree = ""; }; 88E10F482B110D5400C08E95 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 88E10F4B2B110D5400C08E95 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; - 88E10F542B1112CA00C08E95 /* ConversationScreen.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConversationScreen.swift; sourceTree = ""; }; - 88E10F562B1112F600C08E95 /* ConversationViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConversationViewModel.swift; sourceTree = ""; }; + 88E10F542B1112CA00C08E95 /* ChatScreen.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatScreen.swift; sourceTree = ""; }; + 88E10F562B1112F600C08E95 /* ChatViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatViewModel.swift; sourceTree = ""; }; 88E10F5A2B11133E00C08E95 /* MessageView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MessageView.swift; sourceTree = ""; }; 88E10F5C2B11135000C08E95 /* BouncingDots.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BouncingDots.swift; sourceTree = ""; }; A5E8E3C52C3B4F388A7A4A15 /* FilterChipView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FilterChipView.swift; sourceTree = ""; }; @@ -77,15 +78,43 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( + 884298E12E4B8110005F535F /* ConversationKit in Frameworks */, DE26D95F2DBB3E9F007E6668 /* FirebaseAI in Frameworks */, + 885D0CA12E4CB7CD00A217A0 /* ConversationKit in Frameworks */, 886F95D82B17BA420036F07A /* MarkdownUI in Frameworks */, - 7200F3082E3A054300CDC51C /* GenerativeAIUIComponents in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ + 726490D12E3F39C900A92700 /* UIComponents */ = { + isa = PBXGroup; + children = ( + 726490D42E3F39D200A92700 /* Models */, + 726490D62E3F39D600A92700 /* Views */, + ); + path = UIComponents; + sourceTree = ""; + }; + 726490D42E3F39D200A92700 /* Models */ = { + isa = PBXGroup; + children = ( + 726490D22E3F39D200A92700 /* Sample.swift */, + 726490D32E3F39D200A92700 /* UseCase.swift */, + ); + path = Models; + sourceTree = ""; + }; + 726490D62E3F39D600A92700 /* Views */ = { + isa = PBXGroup; + children = ( + 726490D72E3F39D900A92700 /* InputField.swift */, + 726490D82E3F39DC00A92700 /* MultimodalInputField.swift */, + ); + path = Views; + sourceTree = ""; + }; 72DA044D2E385DED004FED7D /* Models */ = { isa = PBXGroup; children = ( @@ -94,6 +123,14 @@ path = Models; sourceTree = ""; }; + 72E040732E448720003D4135 /* Services */ = { + isa = PBXGroup; + children = ( + 72E040742E44872C003D4135 /* WeatherService.swift */, + ); + path = Services; + sourceTree = ""; + }; 86C1F47F2BC726150026816F /* Screens */ = { isa = PBXGroup; children = ( @@ -113,6 +150,7 @@ 86C1F4822BC726150026816F /* FunctionCallingExample */ = { isa = 
PBXGroup; children = ( + 72E040732E448720003D4135 /* Services */, 86C1F4812BC726150026816F /* ViewModels */, 86C1F47F2BC726150026816F /* Screens */, ); @@ -135,26 +173,9 @@ path = Screens; sourceTree = ""; }; - 88209C1A2B0FBDC300F64795 /* Screens */ = { - isa = PBXGroup; - children = ( - 88209C1B2B0FBDC300F64795 /* GenerateContentScreen.swift */, - ); - path = Screens; - sourceTree = ""; - }; - 88209C1C2B0FBDC300F64795 /* ViewModels */ = { - isa = PBXGroup; - children = ( - 88209C1D2B0FBDC300F64795 /* GenerateContentViewModel.swift */, - ); - path = ViewModels; - sourceTree = ""; - }; 88209C222B0FBE1700F64795 /* Frameworks */ = { isa = PBXGroup; children = ( - 726634072E37011C00554974 /* Package.swift */, ); name = Frameworks; sourceTree = ""; @@ -162,11 +183,10 @@ 8848C8262B0D04BC007B434F = { isa = PBXGroup; children = ( + 726490D12E3F39C900A92700 /* UIComponents */, DEFECAA82D7B4CCD00EF9621 /* ImagenScreen */, - 88B8A9352B0FCBA700424728 /* GenerativeAIUIComponents */, 869200B22B879C4F00482873 /* GoogleService-Info.plist */, 8848C8312B0D04BC007B434F /* FirebaseAIExample */, - 8848C8452B0D051E007B434F /* GenerativeAITextExample */, 8848C8572B0D056C007B434F /* GenerativeAIMultimodalExample */, 88E10F432B110D5300C08E95 /* ChatExample */, 86C1F4822BC726150026816F /* FunctionCallingExample */, @@ -203,25 +223,6 @@ path = "Preview Content"; sourceTree = ""; }; - 8848C8452B0D051E007B434F /* GenerativeAITextExample */ = { - isa = PBXGroup; - children = ( - 88209C1C2B0FBDC300F64795 /* ViewModels */, - 88209C1A2B0FBDC300F64795 /* Screens */, - 8848C84A2B0D051F007B434F /* Assets.xcassets */, - 8848C84C2B0D051F007B434F /* Preview Content */, - ); - path = GenerativeAITextExample; - sourceTree = ""; - }; - 8848C84C2B0D051F007B434F /* Preview Content */ = { - isa = PBXGroup; - children = ( - 8848C84D2B0D051F007B434F /* Preview Assets.xcassets */, - ); - path = "Preview Content"; - sourceTree = ""; - }; 8848C8572B0D056C007B434F /* GenerativeAIMultimodalExample */ = { isa = PBXGroup; children = ( @@ -265,7 +266,7 @@ 88E10F502B11123600C08E95 /* ViewModels */ = { isa = PBXGroup; children = ( - 88E10F562B1112F600C08E95 /* ConversationViewModel.swift */, + 88E10F562B1112F600C08E95 /* ChatViewModel.swift */, ); path = ViewModels; sourceTree = ""; @@ -274,7 +275,6 @@ isa = PBXGroup; children = ( AEE793DE2E256D3900708F02 /* Grounding */, - 88263BEE2B239BFE008AB09B /* ErrorView.swift */, 88E10F5A2B11133E00C08E95 /* MessageView.swift */, 88E10F5C2B11135000C08E95 /* BouncingDots.swift */, 889873842B208563005B4896 /* ErrorDetailsView.swift */, @@ -285,7 +285,7 @@ 88E10F532B1112B900C08E95 /* Screens */ = { isa = PBXGroup; children = ( - 88E10F542B1112CA00C08E95 /* ConversationScreen.swift */, + 88E10F542B1112CA00C08E95 /* ChatScreen.swift */, ); path = Screens; sourceTree = ""; @@ -336,7 +336,8 @@ packageProductDependencies = ( 886F95D72B17BA420036F07A /* MarkdownUI */, DE26D95E2DBB3E9F007E6668 /* FirebaseAI */, - 7200F3072E3A054300CDC51C /* GenerativeAIUIComponents */, + 884298E02E4B8110005F535F /* ConversationKit */, + 885D0CA02E4CB7CD00A217A0 /* ConversationKit */, ); productName = GenerativeAIExample; productReference = 8848C82F2B0D04BC007B434F /* FirebaseAIExample.app */; @@ -370,7 +371,7 @@ 88209C212B0FBDF700F64795 /* XCRemoteSwiftPackageReference "swift-markdown-ui" */, DEA09AC32B1FCE22001962D9 /* XCRemoteSwiftPackageReference "NetworkImage" */, DEFECAAB2D7BB49700EF9621 /* XCRemoteSwiftPackageReference "firebase-ios-sdk" */, - 7200F3062E3A054300CDC51C /* XCLocalSwiftPackageReference "GenerativeAIUIComponents" 
*/, + 885D0C9F2E4CB7CD00A217A0 /* XCRemoteSwiftPackageReference "ConversationKit" */, ); productRefGroup = 8848C8302B0D04BC007B434F /* Products */; projectDirPath = ""; @@ -404,22 +405,24 @@ 86C1F4842BC726150026816F /* FunctionCallingViewModel.swift in Sources */, 88263BF12B239C11008AB09B /* ErrorDetailsView.swift in Sources */, 8848C8352B0D04BC007B434F /* ContentView.swift in Sources */, - 886F95D52B17BA010036F07A /* GenerateContentScreen.swift in Sources */, 8848C8332B0D04BC007B434F /* FirebaseAIExampleApp.swift in Sources */, - 886F95E02B17D5010036F07A /* ConversationViewModel.swift in Sources */, + 886F95E02B17D5010036F07A /* ChatViewModel.swift in Sources */, 886F95DD2B17D5010036F07A /* MessageView.swift in Sources */, 886F95DC2B17BAEF0036F07A /* PhotoReasoningScreen.swift in Sources */, DEFECAA92D7B4CCD00EF9621 /* ImagenViewModel.swift in Sources */, DEFECAAA2D7B4CCD00EF9621 /* ImagenScreen.swift in Sources */, 886F95DB2B17BAEF0036F07A /* PhotoReasoningViewModel.swift in Sources */, - 886F95E12B17D5010036F07A /* ConversationScreen.swift in Sources */, - 88263BF02B239C09008AB09B /* ErrorView.swift in Sources */, + 72E040752E448731003D4135 /* WeatherService.swift in Sources */, + 886F95E12B17D5010036F07A /* ChatScreen.swift in Sources */, 72DA044F2E385DF3004FED7D /* ChatMessage.swift in Sources */, - 886F95D62B17BA010036F07A /* GenerateContentViewModel.swift in Sources */, A5E8E3C92C3B4F388A7A4A19 /* FilterChipView.swift in Sources */, A5E8E3CA2C3B4F388A7A4A1A /* SampleCardView.swift in Sources */, AEE793DF2E256D3900708F02 /* GoogleSearchSuggestionView.swift in Sources */, AEE793E02E256D3900708F02 /* GroundedResponseView.swift in Sources */, + 726490D92E3F39E000A92700 /* Sample.swift in Sources */, + 726490DA2E3F39E000A92701 /* UseCase.swift in Sources */, + 726490DC2E3F39E000A92703 /* InputField.swift in Sources */, + 726490DD2E3F39E000A92704 /* MultimodalInputField.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -478,7 +481,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 16.0; + IPHONEOS_DEPLOYMENT_TARGET = 17.0; LOCALIZATION_PREFERS_STRING_CATALOGS = YES; MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; MTL_FAST_MATH = YES; @@ -535,7 +538,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 16.0; + IPHONEOS_DEPLOYMENT_TARGET = 17.0; LOCALIZATION_PREFERS_STRING_CATALOGS = YES; MTL_ENABLE_DEBUG_INFO = NO; MTL_FAST_MATH = YES; @@ -628,13 +631,6 @@ }; /* End XCConfigurationList section */ -/* Begin XCLocalSwiftPackageReference section */ - 7200F3062E3A054300CDC51C /* XCLocalSwiftPackageReference "GenerativeAIUIComponents" */ = { - isa = XCLocalSwiftPackageReference; - relativePath = GenerativeAIUIComponents; - }; -/* End XCLocalSwiftPackageReference section */ - /* Begin XCRemoteSwiftPackageReference section */ 88209C212B0FBDF700F64795 /* XCRemoteSwiftPackageReference "swift-markdown-ui" */ = { isa = XCRemoteSwiftPackageReference; @@ -644,6 +640,14 @@ revision = 55441810c0f678c78ed7e2ebd46dde89228e02fc; }; }; + 885D0C9F2E4CB7CD00A217A0 /* XCRemoteSwiftPackageReference "ConversationKit" */ = { + isa = XCRemoteSwiftPackageReference; + repositoryURL = "https://github.com/peterfriese/ConversationKit"; + requirement = { + kind = upToNextMajorVersion; + minimumVersion = 0.0.2; + }; + }; DEA09AC32B1FCE22001962D9 /* XCRemoteSwiftPackageReference "NetworkImage" */ = { isa = 
XCRemoteSwiftPackageReference; repositoryURL = "https://github.com/gonzalezreal/NetworkImage"; @@ -663,9 +667,14 @@ /* End XCRemoteSwiftPackageReference section */ /* Begin XCSwiftPackageProductDependency section */ - 7200F3072E3A054300CDC51C /* GenerativeAIUIComponents */ = { + 884298E02E4B8110005F535F /* ConversationKit */ = { + isa = XCSwiftPackageProductDependency; + productName = ConversationKit; + }; + 885D0CA02E4CB7CD00A217A0 /* ConversationKit */ = { isa = XCSwiftPackageProductDependency; - productName = GenerativeAIUIComponents; + package = 885D0C9F2E4CB7CD00A217A0 /* XCRemoteSwiftPackageReference "ConversationKit" */; + productName = ConversationKit; }; 886F95D72B17BA420036F07A /* MarkdownUI */ = { isa = XCSwiftPackageProductDependency; diff --git a/firebaseai/FirebaseAIExample/ContentView.swift b/firebaseai/FirebaseAIExample/ContentView.swift index 0cc5b16a5..5af66fad7 100644 --- a/firebaseai/FirebaseAIExample/ContentView.swift +++ b/firebaseai/FirebaseAIExample/ContentView.swift @@ -14,7 +14,6 @@ import SwiftUI import FirebaseAI -import GenerativeAIUIComponents enum BackendOption: String, CaseIterable, Identifiable { case googleAI = "Gemini Developer API" @@ -112,14 +111,14 @@ struct ContentView: View { @ViewBuilder private func destinationView(for sample: Sample) -> some View { switch sample.navRoute { - case "ConversationScreen": - ConversationScreen(firebaseService: firebaseService, sample: sample) + case "ChatScreen": + ChatScreen(firebaseService: firebaseService, sample: sample) case "ImagenScreen": ImagenScreen(firebaseService: firebaseService, sample: sample) case "PhotoReasoningScreen": PhotoReasoningScreen(firebaseService: firebaseService) case "FunctionCallingScreen": - FunctionCallingScreen(firebaseService: firebaseService) + FunctionCallingScreen(firebaseService: firebaseService, sample: sample) default: EmptyView() } diff --git a/firebaseai/FirebaseAIExample/Views/FilterChipView.swift b/firebaseai/FirebaseAIExample/Views/FilterChipView.swift index 8c6ad2bf1..b3701db1e 100644 --- a/firebaseai/FirebaseAIExample/Views/FilterChipView.swift +++ b/firebaseai/FirebaseAIExample/Views/FilterChipView.swift @@ -13,7 +13,6 @@ // limitations under the License. import SwiftUI -import GenerativeAIUIComponents struct FilterChipView: View { let useCase: UseCase diff --git a/firebaseai/FirebaseAIExample/Views/SampleCardView.swift b/firebaseai/FirebaseAIExample/Views/SampleCardView.swift index 139a391b5..af4c4680b 100644 --- a/firebaseai/FirebaseAIExample/Views/SampleCardView.swift +++ b/firebaseai/FirebaseAIExample/Views/SampleCardView.swift @@ -13,7 +13,6 @@ // limitations under the License. import SwiftUI -import GenerativeAIUIComponents struct SampleCardView: View { let sample: Sample diff --git a/firebaseai/FunctionCallingExample/Screens/FunctionCallingScreen.swift b/firebaseai/FunctionCallingExample/Screens/FunctionCallingScreen.swift index 474a55ede..732900985 100644 --- a/firebaseai/FunctionCallingExample/Screens/FunctionCallingScreen.swift +++ b/firebaseai/FunctionCallingExample/Screens/FunctionCallingScreen.swift @@ -13,101 +13,49 @@ // limitations under the License. import FirebaseAI -import GenerativeAIUIComponents import SwiftUI +import ConversationKit struct FunctionCallingScreen: View { let firebaseService: FirebaseAI @StateObject var viewModel: FunctionCallingViewModel - @State - private var userPrompt = "What is 100 Euros in U.S. Dollars?" - - init(firebaseService: FirebaseAI) { + init(firebaseService: FirebaseAI, sample: Sample? 
= nil) { self.firebaseService = firebaseService _viewModel = - StateObject(wrappedValue: FunctionCallingViewModel(firebaseService: firebaseService)) - } - - enum FocusedField: Hashable { - case message + StateObject(wrappedValue: FunctionCallingViewModel(firebaseService: firebaseService, + sample: sample)) } - @FocusState - var focusedField: FocusedField? - var body: some View { - VStack { - ScrollViewReader { scrollViewProxy in - List { - Text("Interact with a currency conversion API using function calling in Gemini.") - ForEach(viewModel.messages) { message in - MessageView(message: message) - } - if let error = viewModel.error { - ErrorView(error: error) - .tag("errorView") - } - } - .listStyle(.plain) - .onChange(of: viewModel.messages, perform: { newValue in - if viewModel.hasError { - // Wait for a short moment to make sure we can actually scroll to the bottom. - DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) { - withAnimation { - scrollViewProxy.scrollTo("errorView", anchor: .bottom) - } - focusedField = .message - } - } else { - guard let lastMessage = viewModel.messages.last else { return } - - // Wait for a short moment to make sure we can actually scroll to the bottom. - DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) { - withAnimation { - scrollViewProxy.scrollTo(lastMessage.id, anchor: .bottom) - } - focusedField = .message - } - } - }) - .onTapGesture { - focusedField = nil + NavigationStack { + ConversationView(messages: $viewModel.messages, + userPrompt: viewModel.initialPrompt) { message in + MessageView(message: message) + } + .disableAttachments() + .onSendMessage { message in + Task { + await viewModel.sendMessage(message.content ?? "", streaming: true) } } - InputField("Message...", text: $userPrompt) { - Image(systemName: viewModel.busy ? 
"stop.circle.fill" : "arrow.up.circle.fill") - .font(.title) + .onError { error in + viewModel.presentErrorDetails = true } - .focused($focusedField, equals: .message) - .onSubmit { sendOrStop() } - } - .toolbar { - ToolbarItem(placement: .primaryAction) { - Button(action: newChat) { - Image(systemName: "square.and.pencil") + .sheet(isPresented: $viewModel.presentErrorDetails) { + if let error = viewModel.error { + ErrorDetailsView(error: error) } } - } - .navigationTitle("Function Calling") - .onAppear { - focusedField = .message - } - } - - private func sendMessage() { - Task { - let prompt = userPrompt - userPrompt = "" - await viewModel.sendMessage(prompt, streaming: true) - } - } - - private func sendOrStop() { - if viewModel.busy { - viewModel.stop() - } else { - sendMessage() + .toolbar { + ToolbarItem(placement: .primaryAction) { + Button(action: newChat) { + Image(systemName: "square.and.pencil") + } + } + } + .navigationTitle(viewModel.title) + .navigationBarTitleDisplayMode(.inline) } } @@ -116,21 +64,6 @@ struct FunctionCallingScreen: View { } } -struct FunctionCallingScreen_Previews: PreviewProvider { - struct ContainerView: View { - @StateObject var viewModel = FunctionCallingViewModel(firebaseService: FirebaseAI.firebaseAI()) - - var body: some View { - FunctionCallingScreen(firebaseService: FirebaseAI.firebaseAI()) - .onAppear { - viewModel.messages = ChatMessage.samples - } - } - } - - static var previews: some View { - NavigationStack { - FunctionCallingScreen(firebaseService: FirebaseAI.firebaseAI()) - } - } +#Preview { + FunctionCallingScreen(firebaseService: FirebaseAI.firebaseAI()) } diff --git a/firebaseai/FunctionCallingExample/Services/WeatherService.swift b/firebaseai/FunctionCallingExample/Services/WeatherService.swift new file mode 100644 index 000000000..ccadcda1f --- /dev/null +++ b/firebaseai/FunctionCallingExample/Services/WeatherService.swift @@ -0,0 +1,27 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import FirebaseAI +import Foundation +import UIKit + +class WeatherService { + public static func fetchWeather(city: String, state: String, date: String) -> JSONObject { + return [ + "temperature": .number(38), + "chancePrecipitation": .string("56%"), + "cloudCover": .string("partlyCloudy"), + ] + } +} diff --git a/firebaseai/FunctionCallingExample/ViewModels/FunctionCallingViewModel.swift b/firebaseai/FunctionCallingExample/ViewModels/FunctionCallingViewModel.swift index 8bd7f90ef..e288ef585 100644 --- a/firebaseai/FunctionCallingExample/ViewModels/FunctionCallingViewModel.swift +++ b/firebaseai/FunctionCallingExample/ViewModels/FunctionCallingViewModel.swift @@ -29,39 +29,57 @@ class FunctionCallingViewModel: ObservableObject { return error != nil } - /// Function calls pending processing - private var functionCalls = [FunctionCallPart]() + @Published var presentErrorDetails: Bool = false + + @Published var initialPrompt: String = "" + @Published var title: String = "" private var model: GenerativeModel private var chat: Chat private var chatTask: Task? - init(firebaseService: FirebaseAI) { // Accept FirebaseAI instance - model = firebaseService.generativeModel( - modelName: "gemini-2.0-flash-001", - tools: [.functionDeclarations([ - FunctionDeclaration( - name: "get_exchange_rate", - description: "Get the exchange rate for currencies between countries", - parameters: [ - "currency_from": .enumeration( - values: ["USD", "EUR", "JPY", "GBP", "AUD", "CAD"], - description: "The currency to convert from in ISO 4217 format" - ), - "currency_to": .enumeration( - values: ["USD", "EUR", "JPY", "GBP", "AUD", "CAD"], - description: "The currency to convert to in ISO 4217 format" - ), - ] - ), - ])] - ) - chat = model.startChat() // Initialize chat with the model from the service + private var sample: Sample? + + init(firebaseService: FirebaseAI, sample: Sample? = nil) { + self.sample = sample + + // create a generative model with sample data + model = firebaseService.generativeModel( + modelName: "gemini-2.0-flash-001", + tools: sample?.tools, + systemInstruction: sample?.systemInstruction + ) + + chat = model.startChat() + + initialPrompt = sample?.initialPrompt ?? "" + title = sample?.title ?? 
"" } func sendMessage(_ text: String, streaming: Bool = true) async { error = nil + if streaming { + await internalSendMessageStreaming(text) + } else { + await internalSendMessage(text) + } + } + + func startNewChat() { + stop() + error = nil + chat = model.startChat() + messages.removeAll() + initialPrompt = "" + } + + func stop() { + chatTask?.cancel() + error = nil + } + + private func internalSendMessageStreaming(_ text: String) async { chatTask?.cancel() chatTask = Task { @@ -71,185 +89,182 @@ class FunctionCallingViewModel: ObservableObject { } // first, add the user's message to the chat - let userMessage = ChatMessage(message: text, participant: .user) + let userMessage = ChatMessage(content: text, participant: .user) messages.append(userMessage) // add a pending message while we're waiting for a response from the backend - let systemMessage = ChatMessage.pending(participant: .system) + let systemMessage = ChatMessage.pending(participant: .other) messages.append(systemMessage) - print(messages) do { - repeat { - if streaming { - try await internalSendMessageStreaming(text) + let responseStream = try chat.sendMessageStream(text) + + for try await chunk in responseStream { + if !chunk.functionCalls.isEmpty { + try await handleFunctionCallsStreaming(chunk) } else { - try await internalSendMessage(text) + if let text = chunk.text { + messages[messages.count - 1] + .content = (messages[messages.count - 1].content ?? "") + text + messages[messages.count - 1].pending = false + } } - } while !functionCalls.isEmpty + } + } catch { self.error = error print(error.localizedDescription) - messages.removeLast() + let errorMessage = ChatMessage(content: "An error occurred. Please try again.", + participant: .other, + error: error, + pending: false) + messages[messages.count - 1] = errorMessage } } } - func startNewChat() { - stop() - error = nil - chat = model.startChat() - messages.removeAll() - } - - func stop() { + private func internalSendMessage(_ text: String) async { chatTask?.cancel() - error = nil - } - private func internalSendMessageStreaming(_ text: String) async throws { - let functionResponses = try await processFunctionCalls() - let responseStream: AsyncThrowingStream - if functionResponses.isEmpty { - responseStream = try chat.sendMessageStream(text) - } else { - for functionResponse in functionResponses { - messages.insert(functionResponse.chatMessage(), at: messages.count - 1) + chatTask = Task { + busy = true + defer { + busy = false } - responseStream = try chat.sendMessageStream([functionResponses.modelContent()]) - } - for try await chunk in responseStream { - processResponseContent(content: chunk) - } - } - private func internalSendMessage(_ text: String) async throws { - let functionResponses = try await processFunctionCalls() - let response: GenerateContentResponse - if functionResponses.isEmpty { - response = try await chat.sendMessage(text) - } else { - for functionResponse in functionResponses { - messages.insert(functionResponse.chatMessage(), at: messages.count - 1) - } - response = try await chat.sendMessage([functionResponses.modelContent()]) - } - processResponseContent(content: response) - } + // first, add the user's message to the chat + let userMessage = ChatMessage(content: text, participant: .user) + messages.append(userMessage) - func processResponseContent(content: GenerateContentResponse) { - guard let candidate = content.candidates.first else { - fatalError("No candidate.") - } + // add a pending message while we're waiting for a response from the 
backend + let systemMessage = ChatMessage.pending(participant: .other) + messages.append(systemMessage) - for part in candidate.content.parts { - switch part { - case let textPart as TextPart: - // replace pending message with backend response - messages[messages.count - 1].message += textPart.text - messages[messages.count - 1].pending = false - case let functionCallPart as FunctionCallPart: - messages.insert(functionCallPart.chatMessage(), at: messages.count - 1) - functionCalls.append(functionCallPart) - default: - fatalError("Unsupported response part: \(part)") + do { + let response = try await chat.sendMessage(text) + + if !response.functionCalls.isEmpty { + try await handleFunctionCalls(response) + } else { + if let responseText = response.text { + // replace pending message with backend response + messages[messages.count - 1].content = responseText + messages[messages.count - 1].pending = false + } + } + } catch { + self.error = error + print(error.localizedDescription) + let errorMessage = ChatMessage(content: "An error occurred. Please try again.", + participant: .other, + error: error, + pending: false) + messages[messages.count - 1] = errorMessage } } } - func processFunctionCalls() async throws -> [FunctionResponsePart] { + private func handleFunctionCallsStreaming(_ response: GenerateContentResponse) async throws { var functionResponses = [FunctionResponsePart]() - for functionCall in functionCalls { + + for functionCall in response.functionCalls { switch functionCall.name { - case "get_exchange_rate": - let exchangeRates = getExchangeRate(args: functionCall.args) - functionResponses.append(FunctionResponsePart( - name: "get_exchange_rate", - response: exchangeRates - )) + case "fetchWeather": + guard case let .string(city) = functionCall.args["city"], + case let .string(state) = functionCall.args["state"], + case let .string(date) = functionCall.args["date"] else { + throw NSError( + domain: "FunctionCallingError", + code: 0, + userInfo: [ + NSLocalizedDescriptionKey: "Malformed arguments for fetchWeather: \(functionCall.args)", + ] + ) + } + + functionResponses.append( + FunctionResponsePart( + name: functionCall.name, + response: WeatherService.fetchWeather(city: city, state: state, date: date) + ) + ) default: - fatalError("Unknown function named \"\(functionCall.name)\".") + print("Unknown function named \"\(functionCall.name)\".") } } - functionCalls = [] - return functionResponses - } - - // MARK: - Callable Functions - - func getExchangeRate(args: JSONObject) -> JSONObject { - // 1. Validate and extract the parameters provided by the model (from a `FunctionCall`) - guard case let .string(from) = args["currency_from"] else { - fatalError("Missing `currency_from` parameter.") - } - guard case let .string(to) = args["currency_to"] else { - fatalError("Missing `currency_to` parameter.") - } - - // 2. 
Get the exchange rate - let allRates: [String: [String: Double]] = [ - "AUD": ["CAD": 0.89265, "EUR": 0.6072, "GBP": 0.51714, "JPY": 97.75, "USD": 0.66379], - "CAD": ["AUD": 1.1203, "EUR": 0.68023, "GBP": 0.57933, "JPY": 109.51, "USD": 0.74362], - "EUR": ["AUD": 1.6469, "CAD": 1.4701, "GBP": 0.85168, "JPY": 160.99, "USD": 1.0932], - "GBP": ["AUD": 1.9337, "CAD": 1.7261, "EUR": 1.1741, "JPY": 189.03, "USD": 1.2836], - "JPY": ["AUD": 0.01023, "CAD": 0.00913, "EUR": 0.00621, "GBP": 0.00529, "USD": 0.00679], - "USD": ["AUD": 1.5065, "CAD": 1.3448, "EUR": 0.91475, "GBP": 0.77907, "JPY": 147.26], - ] - guard let fromRates = allRates[from] else { - return ["error": .string("No data for currency \(from).")] - } - guard let toRate = fromRates[to] else { - return ["error": .string("No data for currency \(to).")] + if !functionResponses.isEmpty { + let finalResponse = try await chat + .sendMessageStream([ModelContent(role: "function", parts: functionResponses)]) + + for try await chunk in finalResponse { + guard let candidate = chunk.candidates.first else { + throw NSError( + domain: "FunctionCallingError", + code: 1, + userInfo: [NSLocalizedDescriptionKey: "No candidate in response chunk"] + ) + } + + for part in candidate.content.parts { + if let textPart = part as? TextPart { + messages[messages.count - 1] + .content = (messages[messages.count - 1].content ?? "") + textPart.text + messages[messages.count - 1].pending = false + } + } + } } - - // 3. Return the exchange rates as a JSON object (returned to the model in a `FunctionResponse`) - return ["rates": .number(toRate)] } -} -private extension FunctionCallPart { - func chatMessage() -> ChatMessage { - let encoder = JSONEncoder() - encoder.outputFormatting = .prettyPrinted + private func handleFunctionCalls(_ response: GenerateContentResponse) async throws { + var functionResponses = [FunctionResponsePart]() - let jsonData: Data - do { - jsonData = try encoder.encode(self) - } catch { - fatalError("JSON Encoding Failed: \(error.localizedDescription)") - } - guard let json = String(data: jsonData, encoding: .utf8) else { - fatalError("Failed to convert JSON data to a String.") + for functionCall in response.functionCalls { + switch functionCall.name { + case "fetchWeather": + guard case let .string(city) = functionCall.args["city"], + case let .string(state) = functionCall.args["state"], + case let .string(date) = functionCall.args["date"] else { + throw NSError( + domain: "FunctionCallingError", + code: 0, + userInfo: [ + NSLocalizedDescriptionKey: "Malformed arguments for fetchWeather: \(functionCall.args)", + ] + ) + } + + functionResponses.append( + FunctionResponsePart( + name: functionCall.name, + response: WeatherService.fetchWeather(city: city, state: state, date: date) + ) + ) + default: + print("Unknown function named \"\(functionCall.name)\".") + } } - let messageText = "Function call requested by model:\n```\n\(json)\n```" - return ChatMessage(message: messageText, participant: .system) - } -} + if !functionResponses.isEmpty { + let finalResponse = try await chat + .sendMessage([ModelContent(role: "function", parts: functionResponses)]) -private extension FunctionResponsePart { - func chatMessage() -> ChatMessage { - let encoder = JSONEncoder() - encoder.outputFormatting = .prettyPrinted + guard let candidate = finalResponse.candidates.first else { + throw NSError( + domain: "FunctionCallingError", + code: 1, + userInfo: [NSLocalizedDescriptionKey: "No candidate in response"] + ) + } - let jsonData: Data - do { - jsonData = try 
diff --git a/firebaseai/GenerativeAIMultimodalExample/Screens/PhotoReasoningScreen.swift b/firebaseai/GenerativeAIMultimodalExample/Screens/PhotoReasoningScreen.swift
index 48cea4882..b1a992eae 100644
--- a/firebaseai/GenerativeAIMultimodalExample/Screens/PhotoReasoningScreen.swift
+++ b/firebaseai/GenerativeAIMultimodalExample/Screens/PhotoReasoningScreen.swift
@@ -12,7 +12,6 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-import GenerativeAIUIComponents
 import MarkdownUI
 import PhotosUI
 import SwiftUI
diff --git a/firebaseai/GenerativeAIUIComponents/Package.swift b/firebaseai/GenerativeAIUIComponents/Package.swift
deleted file mode 100644
index b174a6284..000000000
--- a/firebaseai/GenerativeAIUIComponents/Package.swift
+++ /dev/null
@@ -1,41 +0,0 @@
-// swift-tools-version: 5.9
-// The swift-tools-version declares the minimum version of Swift required to build this package.
-
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-import PackageDescription
-
-let package = Package(
-  name: "GenerativeAIUIComponents",
-  platforms: [
-    .iOS(.v16),
-  ],
-  products: [
-    .library(
-      name: "GenerativeAIUIComponents",
-      targets: ["GenerativeAIUIComponents"]
-    ),
-  ],
-  dependencies: [
-    .package(url: "https://github.com/firebase/firebase-ios-sdk.git", from: "12.0.0"),
-  ],
-  targets: [
-    .target(
-      name: "GenerativeAIUIComponents",
-      dependencies: [
-        .product(name: "FirebaseAI", package: "firebase-ios-sdk"),
-      ]
-    ),
-  ]
-)
diff --git a/firebaseai/ImagenScreen/ImagenScreen.swift b/firebaseai/ImagenScreen/ImagenScreen.swift
index a2d2b9fee..4d546dc94 100644
--- a/firebaseai/ImagenScreen/ImagenScreen.swift
+++ b/firebaseai/ImagenScreen/ImagenScreen.swift
@@ -13,8 +13,8 @@
 // limitations under the License.

 import SwiftUI
-import GenerativeAIUIComponents
 import FirebaseAI
+import ConversationKit

 struct ImagenScreen: View {
   let firebaseService: FirebaseAI
@@ -41,14 +41,21 @@ struct ImagenScreen: View {
     ZStack {
       ScrollView {
         VStack {
-          InputField("Enter a prompt to generate an image", text: $userPrompt) {
-            Image(
-              systemName: viewModel.inProgress ? "stop.circle.fill" : "paperplane.circle.fill"
-            )
-            .font(.title)
+          MessageComposerView(message: $userPrompt)
+            .padding(.bottom, 10)
+            .focused($focusedField, equals: .message)
+            .disableAttachments()
+            .onSubmitAction { sendOrStop() }
+
+          if let error = viewModel.error {
+            HStack {
+              Text("An error occurred.")
+              Button("More information", systemImage: "info.circle") {
+                viewModel.presentErrorDetails = true
+              }
+              .labelStyle(.iconOnly)
+            }
           }
-          .focused($focusedField, equals: .message)
-          .onSubmit { sendOrStop() }

           let spacing: CGFloat = 10
           LazyVGrid(columns: [
@@ -73,7 +80,13 @@ struct ImagenScreen: View {
       .onTapGesture {
         focusedField = nil
       }
+      .sheet(isPresented: $viewModel.presentErrorDetails) {
+        if let error = viewModel.error {
+          ErrorDetailsView(error: error)
+        }
+      }
       .navigationTitle("Imagen example")
+      .navigationBarTitleDisplayMode(.inline)
       .onAppear {
         focusedField = .message
         if userPrompt.isEmpty && !viewModel.initialPrompt.isEmpty {
diff --git a/firebaseai/ImagenScreen/ImagenViewModel.swift b/firebaseai/ImagenScreen/ImagenViewModel.swift
index d4d7f0e6e..3b66dbfa8 100644
--- a/firebaseai/ImagenScreen/ImagenViewModel.swift
+++ b/firebaseai/ImagenScreen/ImagenViewModel.swift
@@ -16,7 +16,6 @@ import FirebaseAI
 import Foundation
 import OSLog
 import SwiftUI
-import GenerativeAIUIComponents

 @MainActor
 class ImagenViewModel: ObservableObject {
@@ -29,7 +28,13 @@ class ImagenViewModel: ObservableObject {
   var images = [UIImage]()

   @Published
-  var errorMessage: String?
+  var error: Error?
+  var hasError: Bool {
+    return error != nil
+  }
+
+  @Published
+  var presentErrorDetails: Bool = false

   @Published
   var inProgress = false
@@ -84,6 +89,7 @@ class ImagenViewModel: ObservableObject {
       }
     } catch {
       if !Task.isCancelled {
+        self.error = error
         logger.error("Error generating images: \(error)")
       }
     }
diff --git a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift b/firebaseai/UIComponents/Models/Sample.swift
similarity index 87%
rename from firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift
rename to firebaseai/UIComponents/Models/Sample.swift
index 3d7637a77..65649235b 100644
--- a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift
+++ b/firebaseai/UIComponents/Models/Sample.swift
@@ -53,7 +53,7 @@ extension Sample {
       description: "The user wants the model to help a new traveler" +
         " with travel tips",
       useCases: [.text],
-      navRoute: "ConversationScreen",
+      navRoute: "ChatScreen",
       chatHistory: [
         ModelContent(
           role: "user",
@@ -79,7 +79,7 @@ extension Sample {
       title: "Chatbot recommendations for courses",
       description: "A chatbot suggests courses for a performing arts program.",
       useCases: [.text],
-      navRoute: "ConversationScreen",
+      navRoute: "ChatScreen",
       initialPrompt: "I am interested in Performing Arts. I have taken Theater 1A.",
       systemInstruction: ModelContent(parts: "You are a chatbot for the county's performing and fine arts" +
         " program. You help students decide what course they will" +
@@ -90,7 +90,7 @@ extension Sample {
       title: "Blog post creator",
       description: "Create a blog post from an image file stored in Cloud Storage.",
       useCases: [.image],
-      navRoute: "PhotoReasoningScreen",
+      navRoute: "ChatScreen",
       chatHistory: [
         ModelContent(role: "user", parts: "Can you help me create a blog post about this image?"),
         ModelContent(
@@ -111,7 +111,7 @@ extension Sample {
       title: "Gemini 2.0 Flash - image generation",
       description: "Generate and/or edit images using Gemini 2.0 Flash",
       useCases: [.image],
-      navRoute: "PhotoReasoningScreen",
+      navRoute: "ChatScreen",
       chatHistory: [
         ModelContent(role: "user", parts: "Can you edit this image to make it brighter?"),
         ModelContent(
@@ -126,7 +126,7 @@ extension Sample {
       title: "Hashtags for a video",
       description: "Generate hashtags for a video ad stored in Cloud Storage.",
       useCases: [.video],
-      navRoute: "ConversationScreen",
+      navRoute: "ChatScreen",
       chatHistory: [
         ModelContent(role: "user", parts: "Can you suggest hashtags for my product video?"),
         ModelContent(
@@ -140,7 +140,7 @@ extension Sample {
       title: "Summarize video",
       description: "Summarize a video and extract important dialogue.",
       useCases: [.video],
-      navRoute: "ConversationScreen",
+      navRoute: "ChatScreen",
       chatHistory: [
         ModelContent(role: "user", parts: "Can you summarize this video for me?"),
         ModelContent(
@@ -155,7 +155,7 @@ extension Sample {
       title: "Audio Summarization",
       description: "Summarize an audio file",
       useCases: [.audio],
-      navRoute: "ConversationScreen",
+      navRoute: "ChatScreen",
       chatHistory: [
         ModelContent(role: "user", parts: "Can you summarize this audio recording?"),
         ModelContent(
@@ -169,7 +169,7 @@ extension Sample {
       title: "Translation from audio",
       description: "Translate an audio file stored in Cloud Storage",
       useCases: [.audio],
-      navRoute: "ConversationScreen",
+      navRoute: "ChatScreen",
       chatHistory: [
         ModelContent(role: "user", parts: "Can you translate this audio from Spanish to English?"),
         ModelContent(
@@ -185,7 +185,7 @@ extension Sample {
       description: "Compare the contents of 2 documents." +
         " Only supported by the Vertex AI Gemini API because the documents are stored in Cloud Storage",
       useCases: [.document],
-      navRoute: "ConversationScreen",
+      navRoute: "ChatScreen",
       chatHistory: [
         ModelContent(role: "user", parts: "Can you compare these two documents for me?"),
         ModelContent(
@@ -197,18 +197,31 @@ extension Sample {
     ),
     // Function Calling
     Sample(
-      title: "Currency conversion",
-      description: "Use function calling to convert currency",
+      title: "Weather Chat",
+      description: "Use function calling to get the weather conditions" +
+        " for a specific US city on a specific date.",
       useCases: [.functionCalling, .text],
       navRoute: "FunctionCallingScreen",
-      initialPrompt: "What is 100 Euros in USD?"
+      initialPrompt: "What was the weather in Boston, MA on October 17, 2024?",
+      tools: [.functionDeclarations([
+        FunctionDeclaration(
+          name: "fetchWeather",
+          description: "Get the weather conditions for a specific US city on a specific date",
+          parameters: [
+            "city": .string(description: "The US city of the location"),
+            "state": .string(description: "The US state of the location"),
+            "date": .string(description: "The date for which to get the weather." +
+              " Date must be in the format: YYYY-MM-DD"),
+          ]
+        ),
+      ])]
     ),
     // Grounding
     Sample(
       title: "Grounding with Google Search",
       description: "Use Grounding with Google Search to get responses based on up-to-date information from the web.",
       useCases: [.text],
-      navRoute: "ConversationScreen",
+      navRoute: "ChatScreen",
       initialPrompt: "What's the weather in Chicago this weekend?",
       tools: [.googleSearch()]
     ),
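Each sample's `navRoute` now points at `ChatScreen` (or `FunctionCallingScreen`), and the Weather Chat sample carries its `tools` declaration and, for other samples, a `systemInstruction` and `chatHistory`. The code that feeds these fields into model creation is not shown in this hunk; assuming the standard FirebaseAI API, the wiring presumably looks roughly like the sketch below (the model name is a placeholder, not taken from this diff):

```swift
import FirebaseAI

// Hypothetical wiring: how a view model might turn a Sample into a Chat.
// The exact call sites are not part of this diff, and the model name is a
// placeholder rather than the one used by the project.
func makeChat(for sample: Sample, using firebaseService: FirebaseAI) -> Chat {
  let model = firebaseService.generativeModel(
    modelName: "gemini-2.0-flash",
    tools: sample.tools, // e.g. the fetchWeather declaration or .googleSearch()
    systemInstruction: sample.systemInstruction
  )
  // Seed the conversation with any canned history the sample provides.
  return model.startChat(history: sample.chatHistory)
}
```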
diff --git a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/UseCase.swift b/firebaseai/UIComponents/Models/UseCase.swift
similarity index 100%
rename from firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/UseCase.swift
rename to firebaseai/UIComponents/Models/UseCase.swift
diff --git a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/InputField.swift b/firebaseai/UIComponents/Views/InputField.swift
similarity index 100%
rename from firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/InputField.swift
rename to firebaseai/UIComponents/Views/InputField.swift
diff --git a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/MultimodalInputField.swift b/firebaseai/UIComponents/Views/MultimodalInputField.swift
similarity index 100%
rename from firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/MultimodalInputField.swift
rename to firebaseai/UIComponents/Views/MultimodalInputField.swift
diff --git a/scripts/test.sh b/scripts/test.sh
index de3c3c20a..92868bc4e 100755
--- a/scripts/test.sh
+++ b/scripts/test.sh
@@ -20,7 +20,10 @@

 set -euo pipefail

-if [ -d "/Applications/Xcode_16.2.app" ]; then
+if [ -d "/Applications/Xcode_26_beta_5.app" ]; then
+  xcode_version="26_beta_5"
+  iphone_version="16"
+elif [ -d "/Applications/Xcode_16.2.app" ]; then
   xcode_version="16.2"
   iphone_version="16"
 else