From dce0688a9d8ed820ca04e5bb560e83e89b7a9b59 Mon Sep 17 00:00:00 2001
From: haibo
Date: Wed, 30 Jul 2025 15:56:16 -0700
Subject: [PATCH 1/7] feat: refactor main menu layout and clean up chat-related components

---
 .../ChatExample/Models/ChatMessage.swift      |  27 +++
 .../Screens/ConversationScreen.swift          |  21 +-
 .../ViewModels/ConversationViewModel.swift    |  33 ++-
 .../project.pbxproj                           |  62 +++--
 .../FirebaseAIExample/ContentView.swift       | 109 +++++----
 .../Views/FilterChipView.swift                |  33 +++
 .../Views/SampleCardView.swift                |  36 +++
 .../GenerativeAIUIComponents/Package.swift    |   8 +-
 .../Models/Sample.swift                       | 220 ++++++++++++++++++
 .../Models/UseCase.swift                      |  26 +++
 firebaseai/ImagenScreen/ImagenScreen.swift    |  25 +-
 firebaseai/ImagenScreen/ImagenViewModel.swift |  11 +-
 12 files changed, 528 insertions(+), 83 deletions(-)
 create mode 100644 firebaseai/FirebaseAIExample/Views/FilterChipView.swift
 create mode 100644 firebaseai/FirebaseAIExample/Views/SampleCardView.swift
 create mode 100644 firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift
 create mode 100644 firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/UseCase.swift

diff --git a/firebaseai/ChatExample/Models/ChatMessage.swift b/firebaseai/ChatExample/Models/ChatMessage.swift
index 345337937..d0ba71db8 100644
--- a/firebaseai/ChatExample/Models/ChatMessage.swift
+++ b/firebaseai/ChatExample/Models/ChatMessage.swift
@@ -70,3 +70,30 @@ extension ChatMessage {

   static var sample = samples[0]
 }
+
+extension ChatMessage {
+  // Convert ModelContent to ChatMessage
+  static func from(_ modelContent: ModelContent) -> ChatMessage? {
+    // Extract text from parts - parts is Array<Part>
+    guard let textPart = modelContent.parts.first as? TextPart else {
+      return nil
+    }
+
+    let participant: Participant
+    switch modelContent.role {
+    case "user":
+      participant = .user
+    case "model":
+      participant = .system
+    default:
+      return nil
+    }
+
+    return ChatMessage(message: textPart.text, participant: participant)
+  }
+
+  // Convert array of ModelContent to array of ChatMessage
+  static func from(_ modelContents: [ModelContent]) -> [ChatMessage] {
+    return modelContents.compactMap { from($0) }
+  }
+}
diff --git a/firebaseai/ChatExample/Screens/ConversationScreen.swift b/firebaseai/ChatExample/Screens/ConversationScreen.swift
index 2b3e5cc10..d5a70f83d 100644
--- a/firebaseai/ChatExample/Screens/ConversationScreen.swift
+++ b/firebaseai/ChatExample/Screens/ConversationScreen.swift
@@ -24,16 +24,11 @@ struct ConversationScreen: View {
   @State
   private var userPrompt = ""

-  init(firebaseService: FirebaseAI, title: String, searchGroundingEnabled: Bool = false) {
-    let model = firebaseService.generativeModel(
-      modelName: "gemini-2.0-flash-001",
-      tools: searchGroundingEnabled ? [.googleSearch()] : []
-    )
-    self.title = title
+  init(firebaseService: FirebaseAI, sampleId: UUID? = nil) {
     self.firebaseService = firebaseService
     _viewModel = StateObject(wrappedValue:
       ConversationViewModel(firebaseService: firebaseService,
-                            model: model))
+                            sampleId: sampleId))
   }

   enum FocusedField: Hashable {
@@ -89,15 +84,24 @@ struct ConversationScreen: View {
       focusedField = nil
     }
     .toolbar {
+      ToolbarItem(placement: .principal) {
+        Text(viewModel.title)
+          .font(.system(size: 24, weight: .bold))
+          .foregroundColor(.primary)
+          .padding(.top, 10)
+      }
       ToolbarItem(placement: .primaryAction) {
         Button(action: newChat) {
           Image(systemName: "square.and.pencil")
         }
       }
     }
-    .navigationTitle(title)
     .onAppear {
       focusedField = .message
+      // Set initial prompt from viewModel if available
+      if userPrompt.isEmpty && !viewModel.initialPrompt.isEmpty {
+        userPrompt = viewModel.initialPrompt
+      }
     }
   }
@@ -121,6 +125,7 @@ struct ConversationScreen: View {

   private func newChat() {
     viewModel.startNewChat()
+    userPrompt = ""
   }
 }

diff --git a/firebaseai/ChatExample/ViewModels/ConversationViewModel.swift b/firebaseai/ChatExample/ViewModels/ConversationViewModel.swift
index 98a237ec3..ace4fccd3 100644
--- a/firebaseai/ChatExample/ViewModels/ConversationViewModel.swift
+++ b/firebaseai/ChatExample/ViewModels/ConversationViewModel.swift
@@ -15,6 +15,7 @@
 import FirebaseAI
 import Foundation
 import UIKit
+import GenerativeAIUIComponents

 @MainActor
 class ConversationViewModel: ObservableObject {
@@ -29,21 +30,38 @@ class ConversationViewModel: ObservableObject {
     return error != nil
   }

+  @Published var initialPrompt: String = ""
+  @Published var title: String = ""
+
   private var model: GenerativeModel
   private var chat: Chat
   private var stopGenerating = false

   private var chatTask: Task<Void, Never>?

-  init(firebaseService: FirebaseAI, model: GenerativeModel? = nil) {
-    if let model {
-      self.model = model
+  private var sample: Sample?
+
+  init(firebaseService: FirebaseAI, sampleId: UUID? = nil) {
+    // retrieve sample from sampleId
+    sample = Sample.find(by: sampleId)
+
+    // create a generative model with sample data
+    model = firebaseService.generativeModel(
+      modelName: "gemini-2.0-flash-001",
+      tools: sample?.tools,
+      systemInstruction: sample?.systemInstruction
+    )
+
+    if let chatHistory = sample?.chatHistory, !chatHistory.isEmpty {
+      // Initialize with sample chat history if it's available
+      messages = ChatMessage.from(chatHistory)
+      chat = model.startChat(history: chatHistory)
     } else {
-      self.model = firebaseService.generativeModel(
-        modelName: "gemini-2.0-flash-001"
-      )
+      chat = model.startChat()
     }
-    chat = self.model.startChat()
+
+    initialPrompt = sample?.initialPrompt ?? ""
+    title = sample?.title ?? ""
   }

   func sendMessage(_ text: String, streaming: Bool = true) async {
@@ -60,6 +78,7 @@ class ConversationViewModel: ObservableObject {
     error = nil
     chat = model.startChat()
     messages.removeAll()
+    initialPrompt = ""
   }

   func stop() {
diff --git a/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj b/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj
index bd44485ab..443e12082 100644
--- a/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj
+++ b/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj
@@ -3,10 +3,12 @@
 	archiveVersion = 1;
 	classes = {
 	};
-	objectVersion = 56;
+	objectVersion = 60;
 	objects = {

/* Begin PBXBuildFile section */
+		7200F3082E3A054300CDC51C /* GenerativeAIUIComponents in Frameworks */ = {isa = PBXBuildFile; productRef = 7200F3072E3A054300CDC51C /* GenerativeAIUIComponents */; };
+		72DA044F2E385DF3004FED7D /* ChatMessage.swift in Sources */ = {isa = PBXBuildFile; fileRef = 72DA044E2E385DF3004FED7D /* ChatMessage.swift */; };
 		869200B32B879C4F00482873 /* GoogleService-Info.plist in Resources */ = {isa = PBXBuildFile; fileRef = 869200B22B879C4F00482873 /* GoogleService-Info.plist */; };
 		86C1F4832BC726150026816F /* FunctionCallingScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 86C1F47E2BC726150026816F /* FunctionCallingScreen.swift */; };
 		86C1F4842BC726150026816F /* FunctionCallingViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 86C1F4802BC726150026816F /* FunctionCallingViewModel.swift */; };
@@ -22,11 +24,11 @@
 		886F95DB2B17BAEF0036F07A /* PhotoReasoningViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8802666F2B0FC39000CF7CB6 /* PhotoReasoningViewModel.swift */; };
 		886F95DC2B17BAEF0036F07A /* PhotoReasoningScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 880266752B0FC39000CF7CB6 /* PhotoReasoningScreen.swift */; };
 		886F95DD2B17D5010036F07A /* MessageView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F5A2B11133E00C08E95 /* MessageView.swift */; };
-		886F95DE2B17D5010036F07A /* ChatMessage.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F582B11131900C08E95 /* ChatMessage.swift */; };
 		886F95DF2B17D5010036F07A /* BouncingDots.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F5C2B11135000C08E95 /* BouncingDots.swift */; };
 		886F95E02B17D5010036F07A /* ConversationViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F562B1112F600C08E95 /* ConversationViewModel.swift */; };
 		886F95E12B17D5010036F07A /* ConversationScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F542B1112CA00C08E95 /* ConversationScreen.swift */; };
-		886F95E32B17D6630036F07A /* GenerativeAIUIComponents in Frameworks */ = {isa = PBXBuildFile; productRef = 886F95E22B17D6630036F07A /* GenerativeAIUIComponents */; };
+		A5E8E3C92C3B4F388A7A4A19 /* FilterChipView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A5E8E3C52C3B4F388A7A4A15 /* FilterChipView.swift */; };
+		A5E8E3CA2C3B4F388A7A4A1A /* SampleCardView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A5E8E3C62C3B4F388A7A4A16 /* SampleCardView.swift */; };
 		AEE793DF2E256D3900708F02 /* GoogleSearchSuggestionView.swift in Sources */ = {isa = PBXBuildFile; fileRef = AEE793DC2E256D3900708F02 /* GoogleSearchSuggestionView.swift */; };
 		AEE793E02E256D3900708F02 /* GroundedResponseView.swift in Sources */ = {isa = PBXBuildFile; fileRef = AEE793DD2E256D3900708F02 /* GroundedResponseView.swift */; };
 		DE26D95F2DBB3E9F007E6668 /* FirebaseAI in Frameworks */ = {isa = PBXBuildFile; productRef = DE26D95E2DBB3E9F007E6668 /* FirebaseAI */; };
@@ -35,6 +37,8 @@
/* End PBXBuildFile section */

/* Begin PBXFileReference section */
+		726634072E37011C00554974 /* Package.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; name = Package.swift; path = GenerativeAIUIComponents/Package.swift; sourceTree = "<group>"; };
+		72DA044E2E385DF3004FED7D /* ChatMessage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatMessage.swift; sourceTree = "<group>"; };
 		869200B22B879C4F00482873 /* GoogleService-Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = "GoogleService-Info.plist"; sourceTree = "<group>"; };
 		86C1F47E2BC726150026816F /* FunctionCallingScreen.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FunctionCallingScreen.swift; sourceTree = "<group>"; };
 		86C1F4802BC726150026816F /* FunctionCallingViewModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FunctionCallingViewModel.swift; sourceTree = "<group>"; };
@@ -58,9 +62,10 @@
 		88E10F4B2B110D5400C08E95 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = "<group>"; };
 		88E10F542B1112CA00C08E95 /* ConversationScreen.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConversationScreen.swift; sourceTree = "<group>"; };
 		88E10F562B1112F600C08E95 /* ConversationViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConversationViewModel.swift; sourceTree = "<group>"; };
-		88E10F582B11131900C08E95 /* ChatMessage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatMessage.swift; sourceTree = "<group>"; };
 		88E10F5A2B11133E00C08E95 /* MessageView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MessageView.swift; sourceTree = "<group>"; };
 		88E10F5C2B11135000C08E95 /* BouncingDots.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BouncingDots.swift; sourceTree = "<group>"; };
+		A5E8E3C52C3B4F388A7A4A15 /* FilterChipView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FilterChipView.swift; sourceTree = "<group>"; };
+		A5E8E3C62C3B4F388A7A4A16 /* SampleCardView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SampleCardView.swift; sourceTree = "<group>"; };
 		AEE793DC2E256D3900708F02 /* GoogleSearchSuggestionView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GoogleSearchSuggestionView.swift; sourceTree = "<group>"; };
 		AEE793DD2E256D3900708F02 /* GroundedResponseView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GroundedResponseView.swift; sourceTree = "<group>"; };
 		DEFECAA62D7B4CCD00EF9621 /* ImagenScreen.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImagenScreen.swift; sourceTree = "<group>"; };
@@ -74,13 +79,21 @@
 			files = (
 				DE26D95F2DBB3E9F007E6668 /* FirebaseAI in Frameworks */,
 				886F95D82B17BA420036F07A /* MarkdownUI in Frameworks */,
-				886F95E32B17D6630036F07A /* GenerativeAIUIComponents in Frameworks */,
+				7200F3082E3A054300CDC51C /* GenerativeAIUIComponents in Frameworks */,
 			);
 			runOnlyForDeploymentPostprocessing = 0;
 		};
/* End PBXFrameworksBuildPhase section */

/* Begin PBXGroup section */
+		72DA044D2E385DED004FED7D /* Models */ = {
+			isa = PBXGroup;
+			children = (
+				72DA044E2E385DF3004FED7D /* ChatMessage.swift */,
+			);
+			path = Models;
+			sourceTree = "<group>";
+		};
 		86C1F47F2BC726150026816F /* Screens */ = {
 			isa = PBXGroup;
 			children = (
@@ -141,6 +154,7 @@
 		88209C222B0FBE1700F64795 /* Frameworks */ = {
 			isa = PBXGroup;
 			children = (
+				726634072E37011C00554974 /* Package.swift */,
 			);
 			name = Frameworks;
 			sourceTree = "<group>";
@@ -176,6 +190,7 @@
 				8848C8342B0D04BC007B434F /* ContentView.swift */,
 				8848C8362B0D04BD007B434F /* Assets.xcassets */,
 				8848C8382B0D04BD007B434F /* Preview Content */,
+				A5E8E3C22C3B4F388A7A4A12 /* Views */,
 			);
 			path = FirebaseAIExample;
 			sourceTree = "<group>";
@@ -229,7 +244,7 @@
 		88E10F432B110D5300C08E95 /* ChatExample */ = {
 			isa = PBXGroup;
 			children = (
-				88E10F522B11124A00C08E95 /* Models */,
+				72DA044D2E385DED004FED7D /* Models */,
 				88E10F502B11123600C08E95 /* ViewModels */,
 				88E10F512B11124100C08E95 /* Views */,
 				88E10F532B1112B900C08E95 /* Screens */,
@@ -267,20 +282,21 @@
 			path = Views;
 			sourceTree = "<group>";
 		};
-		88E10F522B11124A00C08E95 /* Models */ = {
+		88E10F532B1112B900C08E95 /* Screens */ = {
 			isa = PBXGroup;
 			children = (
-				88E10F582B11131900C08E95 /* ChatMessage.swift */,
+				88E10F542B1112CA00C08E95 /* ConversationScreen.swift */,
 			);
-			path = Models;
+			path = Screens;
 			sourceTree = "<group>";
 		};
-		88E10F532B1112B900C08E95 /* Screens */ = {
+		A5E8E3C22C3B4F388A7A4A12 /* Views */ = {
 			isa = PBXGroup;
 			children = (
-				88E10F542B1112CA00C08E95 /* ConversationScreen.swift */,
+				A5E8E3C52C3B4F388A7A4A15 /* FilterChipView.swift */,
+				A5E8E3C62C3B4F388A7A4A16 /* SampleCardView.swift */,
 			);
-			path = Screens;
+			path = Views;
 			sourceTree = "<group>";
 		};
 		AEE793DE2E256D3900708F02 /* Grounding */ = {
@@ -319,8 +335,8 @@
 			name = FirebaseAIExample;
 			packageProductDependencies = (
 				886F95D72B17BA420036F07A /* MarkdownUI */,
-				886F95E22B17D6630036F07A /* GenerativeAIUIComponents */,
 				DE26D95E2DBB3E9F007E6668 /* FirebaseAI */,
+				7200F3072E3A054300CDC51C /* GenerativeAIUIComponents */,
 			);
 			productName = GenerativeAIExample;
 			productReference = 8848C82F2B0D04BC007B434F /* FirebaseAIExample.app */;
@@ -354,6 +370,7 @@
 				88209C212B0FBDF700F64795 /* XCRemoteSwiftPackageReference "swift-markdown-ui" */,
 				DEA09AC32B1FCE22001962D9 /* XCRemoteSwiftPackageReference "NetworkImage" */,
 				DEFECAAB2D7BB49700EF9621 /* XCRemoteSwiftPackageReference "firebase-ios-sdk" */,
+				7200F3062E3A054300CDC51C /* XCLocalSwiftPackageReference "GenerativeAIUIComponents" */,
 			);
 			productRefGroup = 8848C8302B0D04BC007B434F /* Products */;
 			projectDirPath = "";
@@ -385,7 +402,6 @@
 				86C1F4832BC726150026816F /* FunctionCallingScreen.swift in Sources */,
 				886F95DF2B17D5010036F07A /* BouncingDots.swift in Sources */,
 				86C1F4842BC726150026816F /* FunctionCallingViewModel.swift in Sources */,
-				886F95DE2B17D5010036F07A /* ChatMessage.swift in Sources */,
 				88263BF12B239C11008AB09B /* ErrorDetailsView.swift in Sources */,
 				8848C8352B0D04BC007B434F /* ContentView.swift in Sources */,
 				886F95D52B17BA010036F07A /* GenerateContentScreen.swift in Sources */,
@@ -398,7 +414,10 @@
 				886F95DB2B17BAEF0036F07A /* PhotoReasoningViewModel.swift in Sources */,
 				886F95E12B17D5010036F07A /* ConversationScreen.swift in Sources */,
 				88263BF02B239C09008AB09B /* ErrorView.swift in Sources */,
+				72DA044F2E385DF3004FED7D /* ChatMessage.swift in Sources */,
 				886F95D62B17BA010036F07A /* GenerateContentViewModel.swift in Sources */,
+				A5E8E3C92C3B4F388A7A4A19 /* FilterChipView.swift in Sources */,
+				A5E8E3CA2C3B4F388A7A4A1A /* SampleCardView.swift in Sources */,
 				AEE793DF2E256D3900708F02 /* GoogleSearchSuggestionView.swift in Sources */,
 				AEE793E02E256D3900708F02 /* GroundedResponseView.swift in Sources */,
 			);
@@ -609,6 +628,13 @@
 		};
/* End XCConfigurationList section */

+/* Begin XCLocalSwiftPackageReference section */
+		7200F3062E3A054300CDC51C /* XCLocalSwiftPackageReference "GenerativeAIUIComponents" */ = {
+			isa = XCLocalSwiftPackageReference;
+			relativePath = GenerativeAIUIComponents;
+		};
+/* End XCLocalSwiftPackageReference section */
+
/* Begin XCRemoteSwiftPackageReference section */
 		88209C212B0FBDF700F64795 /* XCRemoteSwiftPackageReference "swift-markdown-ui" */ = {
 			isa = XCRemoteSwiftPackageReference;
@@ -637,15 +663,15 @@
/* End XCRemoteSwiftPackageReference section */

/* Begin XCSwiftPackageProductDependency section */
+		7200F3072E3A054300CDC51C /* GenerativeAIUIComponents */ = {
+			isa = XCSwiftPackageProductDependency;
+			productName = GenerativeAIUIComponents;
+		};
 		886F95D72B17BA420036F07A /* MarkdownUI */ = {
 			isa = XCSwiftPackageProductDependency;
 			package = 88209C212B0FBDF700F64795 /* XCRemoteSwiftPackageReference "swift-markdown-ui" */;
 			productName = MarkdownUI;
 		};
-		886F95E22B17D6630036F07A /* GenerativeAIUIComponents */ = {
-			isa = XCSwiftPackageProductDependency;
-			productName = GenerativeAIUIComponents;
-		};
 		DE26D95E2DBB3E9F007E6668 /* FirebaseAI */ = {
 			isa = XCSwiftPackageProductDependency;
 			package = DEFECAAB2D7BB49700EF9621 /* XCRemoteSwiftPackageReference "firebase-ios-sdk" */;
diff --git a/firebaseai/FirebaseAIExample/ContentView.swift b/firebaseai/FirebaseAIExample/ContentView.swift
index 93247fa5c..996036d37 100644
--- a/firebaseai/FirebaseAIExample/ContentView.swift
+++ b/firebaseai/FirebaseAIExample/ContentView.swift
@@ -14,6 +14,7 @@

 import SwiftUI
 import FirebaseAI
+import GenerativeAIUIComponents

 enum BackendOption: String, CaseIterable, Identifiable {
   case googleAI = "Gemini Developer API"
@@ -33,63 +34,87 @@ enum BackendOption: String, CaseIterable, Identifiable {
 struct ContentView: View {
   @State private var selectedBackend: BackendOption = .googleAI
   @State private var firebaseService: FirebaseAI = FirebaseAI.firebaseAI(backend: .googleAI())
+  @State private var selectedUseCase: UseCase = .text
+
+  var filteredSamples: [Sample] {
+    Sample.samples.filter { $0.useCases.contains(selectedUseCase) }
+  }
+
+  let columns = [
+    GridItem(.adaptive(minimum: 150)),
+  ]

   var body: some View {
     NavigationStack {
-      List {
-        Section("Configuration") {
-          Picker("Backend", selection: $selectedBackend) {
-            ForEach(BackendOption.allCases) { option in
-              Text(option.rawValue).tag(option)
+      ScrollView {
+        VStack(alignment: .leading, spacing: 20) {
+          // Backend Configuration
+          GroupBox(label: Text("Backend Configuration").font(.system(size: 18, weight: .bold))) {
+            Picker("Backend", selection: $selectedBackend) {
+              ForEach(BackendOption.allCases) { option in
+                Text(option.rawValue).tag(option)
+              }
             }
+            .pickerStyle(SegmentedPickerStyle())
           }
-        }
+          .padding(.horizontal)

-        Section("Examples") {
-          NavigationLink {
-            GenerateContentScreen(firebaseService: firebaseService)
-          } label: {
-            Label("Generate Content", systemImage: "doc.text")
-          }
-          NavigationLink {
-            PhotoReasoningScreen(firebaseService: firebaseService)
-          } label: {
-            Label("Multi-modal", systemImage: "doc.richtext")
-          }
-          NavigationLink {
-            ConversationScreen(firebaseService: firebaseService, title: "Chat")
-          } label: {
-            Label("Chat", systemImage: "ellipsis.message.fill")
-          }
-          NavigationLink {
-            ConversationScreen(
-              firebaseService: firebaseService,
-              title: "Grounding",
-              searchGroundingEnabled: true
-            )
-          } label: {
-            Label("Grounding with Google Search", systemImage: "magnifyingglass")
-          }
-          NavigationLink {
-            FunctionCallingScreen(firebaseService: firebaseService)
-          } label: {
-            Label("Function Calling", systemImage: "function")
+          // Use Case Filter
+          VStack(alignment: .leading) {
+            Text("Filter by use case")
+              .font(.system(size: 20, weight: .bold))
+              .padding(.horizontal)
+
+            ScrollView(.horizontal, showsIndicators: false) {
+              HStack(spacing: 10) {
+                ForEach(UseCase.allCases) { useCase in
+                  FilterChipView(useCase: useCase, isSelected: selectedUseCase == useCase) {
+                    selectedUseCase = useCase
+                  }
+                }
+              }
+              .padding(.horizontal)
+            }
           }
-          NavigationLink {
-            ImagenScreen(firebaseService: firebaseService)
-          } label: {
-            Label("Imagen", systemImage: "camera.circle")
+
+          // Samples
+          VStack(alignment: .leading) {
+            Text("Samples")
+              .font(.system(size: 20, weight: .bold))
+              .padding(.horizontal)
+
+            LazyVGrid(columns: columns, spacing: 20) {
+              ForEach(filteredSamples) { sample in
+                NavigationLink(destination: destinationView(for: sample)) {
+                  SampleCardView(sample: sample)
+                }
+                .buttonStyle(PlainButtonStyle())
+              }
+            }
+            .padding(.horizontal)
           }
         }
+        .padding(.vertical)
       }
-      .navigationTitle("Generative AI Examples")
+      .navigationTitle("Firebase AI Logic")
       .onChange(of: selectedBackend) { newBackend in
         firebaseService = newBackend.backendValue
-        // Note: This might cause views that hold the old service instance to misbehave
-        // unless they are also correctly updated or recreated.
       }
     }
   }
+
+  @ViewBuilder
+  private func destinationView(for sample: Sample) -> some View {
+    switch sample.useCase {
+    case .text:
+      ConversationScreen(firebaseService: firebaseService, sampleId: sample.id)
+    case .image:
+      ImagenScreen(firebaseService: firebaseService, sampleId: sample.id)
+    case .video, .audio, .document:
+      PhotoReasoningScreen(firebaseService: firebaseService)
+    case .functionCalling:
+      FunctionCallingScreen(firebaseService: firebaseService)
+    }
+  }
 }

 #Preview {
diff --git a/firebaseai/FirebaseAIExample/Views/FilterChipView.swift b/firebaseai/FirebaseAIExample/Views/FilterChipView.swift
new file mode 100644
index 000000000..bd7e1528a
--- /dev/null
+++ b/firebaseai/FirebaseAIExample/Views/FilterChipView.swift
@@ -0,0 +1,33 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import SwiftUI
+import GenerativeAIUIComponents
+
+struct FilterChipView: View {
+  let useCase: UseCase
+  let isSelected: Bool
+  let action: () -> Void
+
+  var body: some View {
+    Button(action: action) {
+      Text(useCase.rawValue)
+        .padding(.horizontal, 16)
+        .padding(.vertical, 8)
+        .background(isSelected ? Color.blue.opacity(0.8) : Color.gray.opacity(0.2))
+        .foregroundColor(isSelected ? .white : .primary)
+        .cornerRadius(12)
+    }
+  }
+}
diff --git a/firebaseai/FirebaseAIExample/Views/SampleCardView.swift b/firebaseai/FirebaseAIExample/Views/SampleCardView.swift
new file mode 100644
index 000000000..6f858bbee
--- /dev/null
+++ b/firebaseai/FirebaseAIExample/Views/SampleCardView.swift
@@ -0,0 +1,36 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import SwiftUI
+import GenerativeAIUIComponents
+
+struct SampleCardView: View {
+  let sample: Sample
+
+  var body: some View {
+    VStack(alignment: .leading) {
+      Text(sample.title)
+        .font(.system(size: 17, weight: .medium))
+      Text(sample.description)
+        .font(.system(size: 14))
+        .foregroundColor(.secondary)
+        .padding(.top, 4)
+    }
+    .padding()
+    .frame(maxWidth: .infinity, minHeight: 150, maxHeight: .infinity, alignment: .top)
+    .background(Color.white)
+    .cornerRadius(12)
+    .shadow(radius: 3)
+  }
+}
diff --git a/firebaseai/GenerativeAIUIComponents/Package.swift b/firebaseai/GenerativeAIUIComponents/Package.swift
index 808f5f42a..b174a6284 100644
--- a/firebaseai/GenerativeAIUIComponents/Package.swift
+++ b/firebaseai/GenerativeAIUIComponents/Package.swift
@@ -27,9 +27,15 @@ let package = Package(
       targets: ["GenerativeAIUIComponents"]
     ),
   ],
+  dependencies: [
+    .package(url: "https://github.com/firebase/firebase-ios-sdk.git", from: "12.0.0"),
+  ],
   targets: [
     .target(
-      name: "GenerativeAIUIComponents"
+      name: "GenerativeAIUIComponents",
+      dependencies: [
+        .product(name: "FirebaseAI", package: "firebase-ios-sdk"),
+      ]
     ),
   ]
 )
diff --git a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift b/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift
new file mode 100644
index 000000000..589618f41
--- /dev/null
+++ b/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift
@@ -0,0 +1,220 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import Foundation
+import FirebaseAI
+
+public struct Sample: Identifiable {
+  public let id = UUID()
+  public let title: String
+  public let description: String
+  public let useCases: [UseCase]
+  public let chatHistory: [ModelContent]?
+  public let initialPrompt: String?
+  public let systemInstruction: ModelContent?
+  public let tools: [Tool]?
+
+  public init(title: String,
+              description: String,
+              useCases: [UseCase],
+              chatHistory: [ModelContent]? = nil,
+              initialPrompt: String? = nil,
+              systemInstruction: ModelContent? = nil,
+              tools: [Tool]? = nil) {
+    self.title = title
+    self.description = description
+    self.useCases = useCases
+    self.chatHistory = chatHistory
+    self.initialPrompt = initialPrompt
+    self.systemInstruction = systemInstruction
+    self.tools = tools
+  }
+
+  public var useCase: UseCase {
+    return useCases.first ?? .text
+  }
+
+  public static func find(by id: UUID?) -> Sample? {
+    guard let id = id else { return nil }
+    return samples.first { $0.id == id }
+  }
+}
+
+extension Sample {
+  public static let samples: [Sample] = [
+    // Text
+    Sample(
+      title: "Travel tips",
+      description: "The user wants the model to help a new traveler" +
+        " with travel tips",
+      useCases: [.text],
+      chatHistory: [
+        ModelContent(
+          role: "user",
+          parts: "I have never traveled before. When should I book a flight?"
+        ),
+        ModelContent(
+          role: "model",
+          parts: "You should book flights a couple of months ahead of time. It will be cheaper and more flexible for you."
+        ),
+        ModelContent(role: "user", parts: "Do I need a passport?"),
+        ModelContent(
+          role: "model",
+          parts: "If you are traveling outside your own country, make sure your passport is up-to-date and valid for more than 6 months during your travel."
+        ),
+      ],
+      initialPrompt: "What else is important when traveling?",
+      systemInstruction: ModelContent(parts: "You are a Travel assistant. You will answer" +
+        " questions the user asks based on the information listed" +
+        " in Relevant Information. Do not hallucinate. Do not use" +
+        " the internet."),
+    ),
+    Sample(
+      title: "Chatbot recommendations for courses",
+      description: "A chatbot suggests courses for a performing arts program.",
+      useCases: [.text],
+      initialPrompt: "I am interested in Performing Arts. I have taken Theater 1A.",
+      systemInstruction: ModelContent(parts: "You are a chatbot for the county's performing and fine arts" +
+        " program. You help students decide what course they will" +
+        " take during the summer."),
+    ),
+    // Image
+    Sample(
+      title: "Blog post creator",
+      description: "Create a blog post from an image file stored in Cloud Storage.",
+      useCases: [.image],
+      chatHistory: [
+        ModelContent(role: "user", parts: "Can you help me create a blog post about this image?"),
+        ModelContent(
+          role: "model",
+          parts: "I'd be happy to help you create a blog post! Please share the image you'd like me to analyze and write about."
+        ),
+      ],
+      initialPrompt: "Please analyze this image and create an engaging blog post"
+    ),
+    Sample(
+      title: "Imagen 3 - image generation",
+      description: "Generate images using Imagen 3",
+      useCases: [.image],
+      initialPrompt: "A photo of a modern building with water in the background"
+    ),
+    Sample(
+      title: "Gemini 2.0 Flash - image generation",
+      description: "Generate and/or edit images using Gemini 2.0 Flash",
+      useCases: [.image],
+      chatHistory: [
+        ModelContent(role: "user", parts: "Can you edit this image to make it brighter?"),
+        ModelContent(
+          role: "model",
+          parts: "I can help you edit images using Gemini 2.0 Flash. Please share the image you'd like me to modify."
+        ),
+      ],
+      initialPrompt: ""
+    ),
+    // Video
+    Sample(
+      title: "Hashtags for a video",
+      description: "Generate hashtags for a video ad stored in Cloud Storage.",
+      useCases: [.video],
+      chatHistory: [
+        ModelContent(role: "user", parts: "Can you suggest hashtags for my product video?"),
+        ModelContent(
+          role: "model",
+          parts: "I'd be happy to help you generate relevant hashtags! Please share your video or describe what it's about so I can suggest appropriate hashtags."
+        ),
+      ],
+      initialPrompt: ""
+    ),
+    Sample(
+      title: "Summarize video",
+      description: "Summarize a video and extract important dialogue.",
+      useCases: [.video],
+      chatHistory: [
+        ModelContent(role: "user", parts: "Can you summarize this video for me?"),
+        ModelContent(
+          role: "model",
+          parts: "I can help you summarize videos and extract key dialogue. Please share the video you'd like me to analyze."
+        ),
+      ],
+      initialPrompt: ""
+    ),
+    // Audio
+    Sample(
+      title: "Audio Summarization",
+      description: "Summarize an audio file",
+      useCases: [.audio],
+      chatHistory: [
+        ModelContent(role: "user", parts: "Can you summarize this audio recording?"),
+        ModelContent(
+          role: "model",
+          parts: "I can help you summarize audio files. Please share the audio recording you'd like me to analyze."
+        ),
+      ],
+      initialPrompt: ""
+    ),
+    Sample(
+      title: "Translation from audio",
+      description: "Translate an audio file stored in Cloud Storage",
+      useCases: [.audio],
+      chatHistory: [
+        ModelContent(role: "user", parts: "Can you translate this audio from Spanish to English?"),
+        ModelContent(
+          role: "model",
+          parts: "I can help you translate audio files. Please share the audio file you'd like me to translate."
+        ),
+      ],
+      initialPrompt: ""
+    ),
+    // Document
+    Sample(
+      title: "Document comparison",
+      description: "Compare the contents of 2 documents." +
+        " Only supported by the Vertex AI Gemini API because the documents are stored in Cloud Storage",
+      useCases: [.document],
+      chatHistory: [
+        ModelContent(role: "user", parts: "Can you compare these two documents for me?"),
+        ModelContent(
+          role: "model",
+          parts: "I can help you compare documents using the Vertex AI Gemini API. Please share the two documents you'd like me to compare."
+        ),
+      ],
+      initialPrompt: ""
+    ),
+    // Function Calling
+    Sample(
+      title: "Weather Chat",
+      description: "Use function calling to get the weather conditions" +
+        " for a specific US city on a specific date.",
+      useCases: [.functionCalling, .text],
+      chatHistory: [
+        ModelContent(role: "user", parts: "What's the weather like in New York today?"),
+        ModelContent(
+          role: "model",
+          parts: "I can help you get weather information using function calling. Let me check the current weather conditions for New York."
+        ),
+      ],
+      initialPrompt: ""
+    ),
+    // Grounding
+    Sample(
+      title: "Grounding with Google Search",
+      description: "Use Grounding with Google Search to get responses based on up-to-date information from the web.",
+      useCases: [.text],
+      initialPrompt: "What's the weather in Chicago this weekend?",
+      tools: [.googleSearch()]
+    ),
+  ]
+
+  public static var sample = samples[0]
+}
diff --git a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/UseCase.swift b/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/UseCase.swift
new file mode 100644
index 000000000..5448dc01b
--- /dev/null
+++ b/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/UseCase.swift
@@ -0,0 +1,26 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+import Foundation
+
+public enum UseCase: String, CaseIterable, Identifiable {
+  case text = "Text"
+  case image = "Image"
+  case video = "Video"
+  case audio = "Audio"
+  case document = "Document"
+  case functionCalling = "Function Calling"
+
+  public var id: String { rawValue }
+}
diff --git a/firebaseai/ImagenScreen/ImagenScreen.swift b/firebaseai/ImagenScreen/ImagenScreen.swift
index f8e7a088a..7b0892a01 100644
--- a/firebaseai/ImagenScreen/ImagenScreen.swift
+++ b/firebaseai/ImagenScreen/ImagenScreen.swift
@@ -20,9 +20,14 @@ struct ImagenScreen: View {
   let firebaseService: FirebaseAI
   @StateObject var viewModel: ImagenViewModel

-  init(firebaseService: FirebaseAI) {
+  @State
+  private var userPrompt = ""
+
+  init(firebaseService: FirebaseAI, sampleId: UUID? = nil) {
     self.firebaseService = firebaseService
-    _viewModel = StateObject(wrappedValue: ImagenViewModel(firebaseService: firebaseService))
+    _viewModel =
+      StateObject(wrappedValue: ImagenViewModel(firebaseService: firebaseService,
+                                                sampleId: sampleId))
   }

   enum FocusedField: Hashable {
@@ -36,7 +41,7 @@ struct ImagenScreen: View {
     ZStack {
       ScrollView {
         VStack {
-          InputField("Enter a prompt to generate an image", text: $viewModel.userInput) {
+          InputField("Enter a prompt to generate an image", text: $userPrompt) {
             Image(
               systemName: viewModel.inProgress ? "stop.circle.fill" : "paperplane.circle.fill"
             )
@@ -68,15 +73,25 @@ struct ImagenScreen: View {
     .onTapGesture {
       focusedField = nil
     }
-    .navigationTitle("Imagen example")
+    .toolbar {
+      ToolbarItem(placement: .principal) {
+        Text("Imagen example")
+          .font(.system(size: 24, weight: .bold))
+          .foregroundColor(.primary)
+          .padding(.top, 10)
+      }
+    }
     .onAppear {
       focusedField = .message
+      if userPrompt.isEmpty && !viewModel.initialPrompt.isEmpty {
+        userPrompt = viewModel.initialPrompt
+      }
     }
   }

   private func sendMessage() {
     Task {
-      await viewModel.generateImage(prompt: viewModel.userInput)
+      await viewModel.generateImage(prompt: userPrompt)
       focusedField = .message
     }
   }
diff --git a/firebaseai/ImagenScreen/ImagenViewModel.swift b/firebaseai/ImagenScreen/ImagenViewModel.swift
index d4fc2b43f..ca79ddc00 100644
--- a/firebaseai/ImagenScreen/ImagenViewModel.swift
+++ b/firebaseai/ImagenScreen/ImagenViewModel.swift
@@ -16,13 +16,14 @@ import FirebaseAI
 import Foundation
 import OSLog
 import SwiftUI
+import GenerativeAIUIComponents

 @MainActor
 class ImagenViewModel: ObservableObject {
   private var logger = Logger(subsystem: Bundle.main.bundleIdentifier!, category: "generative-ai")

   @Published
-  var userInput: String = ""
+  var initialPrompt: String = ""

   @Published
   var images = [UIImage]()
@@ -37,7 +38,11 @@ class ImagenViewModel: ObservableObject {

   private var generateImagesTask: Task<Void, Never>?

-  init(firebaseService: FirebaseAI) {
+  private var sample: Sample?
+
+  init(firebaseService: FirebaseAI, sampleId: UUID? = nil) {
+    sample = Sample.find(by: sampleId)
+
     let modelName = "imagen-3.0-generate-002"
     let safetySettings = ImagenSafetySettings(
       safetyFilterLevel: .blockLowAndAbove
@@ -51,6 +56,8 @@ class ImagenViewModel: ObservableObject {
       generationConfig: generationConfig,
       safetySettings: safetySettings
     )
+
+    initialPrompt = sample?.initialPrompt ?? ""
   }

   func generateImage(prompt: String) async {

From 44169565072ac335eaf0d3d3eff87d5791722c4e Mon Sep 17 00:00:00 2001
From: haibo
Date: Wed, 30 Jul 2025 17:00:06 -0700
Subject: [PATCH 2/7] Some minor layout fixes

---
 firebaseai/ChatExample/Screens/ConversationScreen.swift | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/firebaseai/ChatExample/Screens/ConversationScreen.swift b/firebaseai/ChatExample/Screens/ConversationScreen.swift
index d5a70f83d..369153d2f 100644
--- a/firebaseai/ChatExample/Screens/ConversationScreen.swift
+++ b/firebaseai/ChatExample/Screens/ConversationScreen.swift
@@ -18,7 +18,6 @@ import SwiftUI

 struct ConversationScreen: View {
   let firebaseService: FirebaseAI
-  let title: String
   @StateObject var viewModel: ConversationViewModel

   @State
@@ -135,7 +134,7 @@ struct ConversationScreen_Previews: PreviewProvider {
   struct ContainerView: View {
     @StateObject var viewModel = ConversationViewModel(firebaseService: FirebaseAI
       .firebaseAI()) // Example service init

     var body: some View {
-      ConversationScreen(firebaseService: FirebaseAI.firebaseAI(), title: "Chat sample")
+      ConversationScreen(firebaseService: FirebaseAI.firebaseAI())
         .onAppear {
           viewModel.messages = ChatMessage.samples
         }
@@ -144,7 +143,7 @@ struct ConversationScreen_Previews: PreviewProvider {

     static var previews: some View {
       NavigationStack {
-        ConversationScreen(firebaseService: FirebaseAI.firebaseAI(), title: "Chat sample")
+        ConversationScreen(firebaseService: FirebaseAI.firebaseAI())
       }
     }
   }

From a00ec00177ac8b5f249892eecdcbe0ffc9a604ca Mon Sep 17 00:00:00 2001
From: haibo
Date: Wed, 30 Jul 2025 17:28:27 -0700
Subject: [PATCH 3/7] iterate over all TextParts

---
 firebaseai/ChatExample/Models/ChatMessage.swift | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/firebaseai/ChatExample/Models/ChatMessage.swift b/firebaseai/ChatExample/Models/ChatMessage.swift
index d0ba71db8..6cabe5cf7 100644
--- a/firebaseai/ChatExample/Models/ChatMessage.swift
+++ b/firebaseai/ChatExample/Models/ChatMessage.swift
@@ -74,8 +74,9 @@ extension ChatMessage {
   // Convert ModelContent to ChatMessage
   static func from(_ modelContent: ModelContent) -> ChatMessage? {
-    // Extract text from parts - parts is Array<Part>
-    guard let textPart = modelContent.parts.first as? TextPart else {
+    // Extract text from all parts
+    let text = modelContent.parts.compactMap { ($0 as? TextPart)?.text }.joined()
+    guard !text.isEmpty else {
       return nil
     }

@@ -89,7 +90,7 @@ extension ChatMessage {
       return nil
     }

-    return ChatMessage(message: textPart.text, participant: participant)
+    return ChatMessage(message: text, participant: participant)
   }

   // Convert array of ModelContent to array of ChatMessage

From 750bc60a388c0735aab7f52e11f13adc9d4c5795 Mon Sep 17 00:00:00 2001
From: haibo
Date: Wed, 30 Jul 2025 17:48:58 -0700
Subject: [PATCH 4/7] add navRoute in Sample

---
 .../FirebaseAIExample/ContentView.swift       | 12 +++++++-----
 .../Models/Sample.swift                       | 19 +++++++++++++++----
 2 files changed, 22 insertions(+), 9 deletions(-)

diff --git a/firebaseai/FirebaseAIExample/ContentView.swift b/firebaseai/FirebaseAIExample/ContentView.swift
index 996036d37..492efc873 100644
--- a/firebaseai/FirebaseAIExample/ContentView.swift
+++ b/firebaseai/FirebaseAIExample/ContentView.swift
@@ -104,15 +104,17 @@ struct ContentView: View {

   @ViewBuilder
   private func destinationView(for sample: Sample) -> some View {
-    switch sample.useCase {
-    case .text:
+    switch sample.navRoute {
+    case "ConversationScreen":
       ConversationScreen(firebaseService: firebaseService, sampleId: sample.id)
-    case .image:
+    case "ImagenScreen":
       ImagenScreen(firebaseService: firebaseService, sampleId: sample.id)
-    case .video, .audio, .document:
+    case "PhotoReasoningScreen":
       PhotoReasoningScreen(firebaseService: firebaseService)
-    case .functionCalling:
+    case "FunctionCallingScreen":
       FunctionCallingScreen(firebaseService: firebaseService)
+    default:
+      EmptyView()
     }
   }
 }
diff --git a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift b/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift
index 589618f41..d96dbaf75 100644
--- a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift
+++ b/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift
@@ -20,6 +20,7 @@ public struct Sample: Identifiable {
   public let title: String
   public let description: String
   public let useCases: [UseCase]
+  public let navRoute: String
   public let chatHistory: [ModelContent]?
   public let initialPrompt: String?
   public let systemInstruction: ModelContent?
@@ -28,6 +29,7 @@ public struct Sample: Identifiable {
   public init(title: String,
               description: String,
               useCases: [UseCase],
+              navRoute: String,
               chatHistory: [ModelContent]? = nil,
               initialPrompt: String? = nil,
               systemInstruction: ModelContent? = nil,
@@ -35,16 +37,13 @@ public struct Sample: Identifiable {
     self.title = title
     self.description = description
     self.useCases = useCases
+    self.navRoute = navRoute
     self.chatHistory = chatHistory
     self.initialPrompt = initialPrompt
     self.systemInstruction = systemInstruction
     self.tools = tools
   }

-  public var useCase: UseCase {
-    return useCases.first ?? .text
-  }
-
   public static func find(by id: UUID?) -> Sample? {
     guard let id = id else { return nil }
     return samples.first { $0.id == id }
   }
@@ -59,6 +58,7 @@ extension Sample {
       description: "The user wants the model to help a new traveler" +
         " with travel tips",
       useCases: [.text],
+      navRoute: "ConversationScreen",
       chatHistory: [
         ModelContent(
           role: "user",
@@ -84,6 +84,7 @@ extension Sample {
       title: "Chatbot recommendations for courses",
       description: "A chatbot suggests courses for a performing arts program.",
       useCases: [.text],
+      navRoute: "ConversationScreen",
       initialPrompt: "I am interested in Performing Arts. I have taken Theater 1A.",
       systemInstruction: ModelContent(parts: "You are a chatbot for the county's performing and fine arts" +
         " program. You help students decide what course they will" +
@@ -94,6 +95,7 @@ extension Sample {
       title: "Blog post creator",
       description: "Create a blog post from an image file stored in Cloud Storage.",
       useCases: [.image],
+      navRoute: "PhotoReasoningScreen",
       chatHistory: [
         ModelContent(role: "user", parts: "Can you help me create a blog post about this image?"),
         ModelContent(
@@ -107,12 +109,14 @@ extension Sample {
       title: "Imagen 3 - image generation",
       description: "Generate images using Imagen 3",
       useCases: [.image],
+      navRoute: "ImagenScreen",
       initialPrompt: "A photo of a modern building with water in the background"
     ),
     Sample(
       title: "Gemini 2.0 Flash - image generation",
       description: "Generate and/or edit images using Gemini 2.0 Flash",
       useCases: [.image],
+      navRoute: "PhotoReasoningScreen",
       chatHistory: [
         ModelContent(role: "user", parts: "Can you edit this image to make it brighter?"),
         ModelContent(
@@ -127,6 +131,7 @@ extension Sample {
       title: "Hashtags for a video",
       description: "Generate hashtags for a video ad stored in Cloud Storage.",
       useCases: [.video],
+      navRoute: "ConversationScreen",
       chatHistory: [
         ModelContent(role: "user", parts: "Can you suggest hashtags for my product video?"),
         ModelContent(
@@ -140,6 +145,7 @@ extension Sample {
       title: "Summarize video",
       description: "Summarize a video and extract important dialogue.",
       useCases: [.video],
+      navRoute: "ConversationScreen",
       chatHistory: [
         ModelContent(role: "user", parts: "Can you summarize this video for me?"),
         ModelContent(
@@ -154,6 +160,7 @@ extension Sample {
       title: "Audio Summarization",
       description: "Summarize an audio file",
       useCases: [.audio],
+      navRoute: "ConversationScreen",
       chatHistory: [
         ModelContent(role: "user", parts: "Can you summarize this audio recording?"),
         ModelContent(
@@ -167,6 +174,7 @@ extension Sample {
       title: "Translation from audio",
       description: "Translate an audio file stored in Cloud Storage",
       useCases: [.audio],
+      navRoute: "ConversationScreen",
       chatHistory: [
         ModelContent(role: "user", parts: "Can you translate this audio from Spanish to English?"),
         ModelContent(
@@ -182,6 +190,7 @@ extension Sample {
       description: "Compare the contents of 2 documents." +
         " Only supported by the Vertex AI Gemini API because the documents are stored in Cloud Storage",
       useCases: [.document],
+      navRoute: "ConversationScreen",
       chatHistory: [
         ModelContent(role: "user", parts: "Can you compare these two documents for me?"),
         ModelContent(
@@ -197,6 +206,7 @@ extension Sample {
       description: "Use function calling to get the weather conditions" +
         " for a specific US city on a specific date.",
       useCases: [.functionCalling, .text],
+      navRoute: "FunctionCallingScreen",
       chatHistory: [
         ModelContent(role: "user", parts: "What's the weather like in New York today?"),
         ModelContent(
@@ -211,6 +221,7 @@ extension Sample {
       title: "Grounding with Google Search",
       description: "Use Grounding with Google Search to get responses based on up-to-date information from the web.",
       useCases: [.text],
+      navRoute: "ConversationScreen",
       initialPrompt: "What's the weather in Chicago this weekend?",
       tools: [.googleSearch()]
     ),

From 68537642a50fbbbb3d41b7192fc1400d9aabcea7 Mon Sep 17 00:00:00 2001
From: haibo
Date: Sat, 2 Aug 2025 12:15:55 -0700
Subject: [PATCH 5/7] fix style in light/dark mode

---
 .../ChatExample/Models/ChatMessage.swift      |  14 +--
 .../Screens/ConversationScreen.swift          |  13 +--
 .../ViewModels/ConversationViewModel.swift    |   5 +-
 .../FirebaseAIExample/ContentView.swift       |  17 ++-
 .../Views/FilterChipView.swift                |  32 +++++-
 .../Views/SampleCardView.swift                | 104 ++++++++++++++++--
 .../Models/Sample.swift                       |   7 +-
 firebaseai/ImagenScreen/ImagenScreen.swift    |  13 +--
 firebaseai/ImagenScreen/ImagenViewModel.swift |   4 +-
 9 files changed, 149 insertions(+), 60 deletions(-)

diff --git a/firebaseai/ChatExample/Models/ChatMessage.swift b/firebaseai/ChatExample/Models/ChatMessage.swift
index 6cabe5cf7..33725083f 100644
--- a/firebaseai/ChatExample/Models/ChatMessage.swift
+++ b/firebaseai/ChatExample/Models/ChatMessage.swift
@@ -72,28 +72,18 @@ extension ChatMessage {
 }

 extension ChatMessage {
-  // Convert ModelContent to ChatMessage
   static func from(_ modelContent: ModelContent) -> ChatMessage? {
-    // Extract text from all parts
+    // TODO: add non-text parts to message when multi-model support is added
     let text = modelContent.parts.compactMap { ($0 as? TextPart)?.text }.joined()
     guard !text.isEmpty else {
       return nil
     }

-    let participant: Participant
-    switch modelContent.role {
-    case "user":
-      participant = .user
-    case "model":
-      participant = .system
-    default:
-      return nil
-    }
+    let participant: Participant = (modelContent.role == "user") ? .user : .system

     return ChatMessage(message: text, participant: participant)
   }

-  // Convert array of ModelContent to array of ChatMessage
   static func from(_ modelContents: [ModelContent]) -> [ChatMessage] {
     return modelContents.compactMap { from($0) }
   }
diff --git a/firebaseai/ChatExample/Screens/ConversationScreen.swift b/firebaseai/ChatExample/Screens/ConversationScreen.swift
index 369153d2f..c26f76ddb 100644
--- a/firebaseai/ChatExample/Screens/ConversationScreen.swift
+++ b/firebaseai/ChatExample/Screens/ConversationScreen.swift
@@ -23,11 +23,11 @@ struct ConversationScreen: View {
   @State
   private var userPrompt = ""

-  init(firebaseService: FirebaseAI, sampleId: UUID? = nil) {
+  init(firebaseService: FirebaseAI, sample: Sample? = nil) {
     self.firebaseService = firebaseService
     _viewModel = StateObject(wrappedValue:
       ConversationViewModel(firebaseService: firebaseService,
-                            sampleId: sampleId))
+                            sample: sample))
   }

   enum FocusedField: Hashable {
@@ -83,18 +83,13 @@ struct ConversationScreen: View {
       focusedField = nil
     }
     .toolbar {
-      ToolbarItem(placement: .principal) {
-        Text(viewModel.title)
-          .font(.system(size: 24, weight: .bold))
-          .foregroundColor(.primary)
-          .padding(.top, 10)
-      }
       ToolbarItem(placement: .primaryAction) {
         Button(action: newChat) {
           Image(systemName: "square.and.pencil")
         }
       }
     }
+    .navigationTitle(viewModel.title)
     .onAppear {
       focusedField = .message
       // Set initial prompt from viewModel if available
@@ -126,7 +126,7 @@ struct ConversationScreen: View {
 struct ConversationScreen_Previews: PreviewProvider {
   struct ContainerView: View {
     @StateObject var viewModel = ConversationViewModel(firebaseService: FirebaseAI
-      .firebaseAI()) // Example service init
+      .firebaseAI(), sample: nil) // Example service init

     var body: some View {
       ConversationScreen(firebaseService: FirebaseAI.firebaseAI())
diff --git a/firebaseai/ChatExample/ViewModels/ConversationViewModel.swift b/firebaseai/ChatExample/ViewModels/ConversationViewModel.swift
index ace4fccd3..676534a13 100644
--- a/firebaseai/ChatExample/ViewModels/ConversationViewModel.swift
+++ b/firebaseai/ChatExample/ViewModels/ConversationViewModel.swift
@@ -41,9 +41,8 @@ class ConversationViewModel: ObservableObject {

   private var sample: Sample?

-  init(firebaseService: FirebaseAI, sampleId: UUID? = nil) {
-    // retrieve sample from sampleId
-    sample = Sample.find(by: sampleId)
+  init(firebaseService: FirebaseAI, sample: Sample? = nil) {
+    self.sample = sample

     // create a generative model with sample data
     model = firebaseService.generativeModel(
diff --git a/firebaseai/FirebaseAIExample/ContentView.swift b/firebaseai/FirebaseAIExample/ContentView.swift
index 492efc873..0cc5b16a5 100644
--- a/firebaseai/FirebaseAIExample/ContentView.swift
+++ b/firebaseai/FirebaseAIExample/ContentView.swift
@@ -49,15 +49,20 @@ struct ContentView: View {
       ScrollView {
         VStack(alignment: .leading, spacing: 20) {
           // Backend Configuration
-          GroupBox(label: Text("Backend Configuration").font(.system(size: 18, weight: .bold))) {
+          VStack(alignment: .leading) {
+            Text("Backend Configuration")
+              .font(.system(size: 20, weight: .bold))
+              .padding(.horizontal)
+
             Picker("Backend", selection: $selectedBackend) {
               ForEach(BackendOption.allCases) { option in
-                Text(option.rawValue).tag(option)
+                Text(option.rawValue)
+                  .tag(option)
               }
             }
             .pickerStyle(SegmentedPickerStyle())
+            .padding(.horizontal)
           }
-          .padding(.horizontal)

           // Use Case Filter
           VStack(alignment: .leading) {
@@ -76,6 +81,7 @@ struct ContentView: View {
               .padding(.horizontal)
             }
           }
+
           // Samples
           VStack(alignment: .leading) {
             Text("Samples")
@@ -95,6 +101,7 @@ struct ContentView: View {
         }
         .padding(.vertical)
       }
+      .background(Color(.systemGroupedBackground))
       .navigationTitle("Firebase AI Logic")
       .onChange(of: selectedBackend) { newBackend in
         firebaseService = newBackend.backendValue
@@ -113,9 +120,9 @@ struct ContentView: View {
     switch sample.navRoute {
     case "ConversationScreen":
-      ConversationScreen(firebaseService: firebaseService, sampleId: sample.id)
+      ConversationScreen(firebaseService: firebaseService, sample: sample)
     case "ImagenScreen":
-      ImagenScreen(firebaseService: firebaseService, sampleId: sample.id)
+      ImagenScreen(firebaseService: firebaseService, sample: sample)
     case "PhotoReasoningScreen":
       PhotoReasoningScreen(firebaseService: firebaseService)
     case "FunctionCallingScreen":
diff --git a/firebaseai/FirebaseAIExample/Views/FilterChipView.swift b/firebaseai/FirebaseAIExample/Views/FilterChipView.swift
index bd7e1528a..8c6ad2bf1 100644
--- a/firebaseai/FirebaseAIExample/Views/FilterChipView.swift
+++ b/firebaseai/FirebaseAIExample/Views/FilterChipView.swift
@@ -23,11 +23,33 @@ struct FilterChipView: View {
   var body: some View {
     Button(action: action) {
       Text(useCase.rawValue)
-        .padding(.horizontal, 16)
-        .padding(.vertical, 8)
-        .background(isSelected ? Color.blue.opacity(0.8) : Color.gray.opacity(0.2))
-        .foregroundColor(isSelected ? .white : .primary)
-        .cornerRadius(12)
+        .padding(.horizontal)
     }
+    .filterChipStyle(isSelected: isSelected)
+  }
+}
+
+private struct FilterChipStyle: ViewModifier {
+  let isSelected: Bool
+
+  func body(content: Content) -> some View {
+    if isSelected {
+      content.buttonStyle(.borderedProminent)
+    } else {
+      content.buttonStyle(.bordered)
+    }
+  }
+}
+
+extension View {
+  func filterChipStyle(isSelected: Bool) -> some View {
+    modifier(FilterChipStyle(isSelected: isSelected))
+  }
+}
+
+#Preview {
+  VStack(spacing: 16) {
+    FilterChipView(useCase: .text, isSelected: true) {}
+    FilterChipView(useCase: .text, isSelected: false) {}
   }
 }
diff --git a/firebaseai/FirebaseAIExample/Views/SampleCardView.swift b/firebaseai/FirebaseAIExample/Views/SampleCardView.swift
index 6f858bbee..75f85fb26 100644
--- a/firebaseai/FirebaseAIExample/Views/SampleCardView.swift
+++ b/firebaseai/FirebaseAIExample/Views/SampleCardView.swift
@@ -19,18 +19,106 @@ struct SampleCardView: View {
   let sample: Sample

   var body: some View {
-    VStack(alignment: .leading) {
-      Text(sample.title)
-        .font(.system(size: 17, weight: .medium))
+    GroupBox {
       Text(sample.description)
         .font(.system(size: 14))
         .foregroundColor(.secondary)
-        .padding(.top, 4)
+        .frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .topLeading)
+    } label: {
+      HStack {
+        if let useCase = sample.useCases.first {
+          Image(systemName: systemName(for: useCase))
+            .foregroundColor(color(for: useCase))
+        }
+        Text(sample.title)
+          .font(.system(size: 17, weight: .medium))
+      }
     }
-    .padding()
+    .groupBoxStyle(CardGroupBoxStyle())
     .frame(maxWidth: .infinity, minHeight: 150, maxHeight: .infinity, alignment: .top)
-    .background(Color.white)
-    .cornerRadius(12)
-    .shadow(radius: 3)
   }
+
+  private func systemName(for useCase: UseCase) -> String {
+    switch useCase {
+    case .text: "text.bubble.fill"
+    case .image: "photo.fill"
+    case .video: "video.fill"
+    case .audio: "waveform"
+    case .document: "doc.fill"
+    case .functionCalling: "gearshape.2.fill"
+    }
+  }
+
+  private func color(for useCase: UseCase) -> Color {
+    switch useCase {
+    case .text: .blue
+    case .image: .purple
+    case .video: .red
+    case .audio: .orange
+    case .document: .gray
+    case .functionCalling: .green
+    }
+  }
+}
+
+public struct CardGroupBoxStyle: GroupBoxStyle {
+  private var cornerRadius: CGFloat {
+    if #available(iOS 26.0, *) {
+      return 28
+    } else {
+      return 12
+    }
+  }
+
+  public func makeBody(configuration: Configuration) -> some View {
+    VStack(alignment: .leading, spacing: 12) {
+      configuration.label
+      configuration.content
+    }
+    .padding()
+    .background(Color(.secondarySystemGroupedBackground))
+    .clipShape(RoundedRectangle(cornerRadius: cornerRadius, style: .continuous))
+  }
+}
+
+#Preview {
+  let samples = [
+    Sample(
+      title: "Sample 1",
+      description: "This is the first sample card.",
+      useCases: [.text],
+      navRoute: "ConversationScreen"
+    ),
+    Sample(
+      title: "Sample 2",
+      description: "This is the second sample card.",
+      useCases: [.image],
+      navRoute: "PhotoReasoningScreen"
+    ),
+    Sample(
+      title: "Sample 3",
+      description: "This is the third sample card.",
+      useCases: [.video],
+      navRoute: "ConversationScreen"
+    ),
+    Sample(
+      title: "Sample 4",
+      description: "This is the fourth sample card, which is a bit longer to see how the text wraps and if everything still aligns correctly.",
+      useCases: [.audio],
+      navRoute: "ConversationScreen"
+    ),
+  ]
+
+  ScrollView {
+    LazyVGrid(columns: [
+      GridItem(.flexible()),
+      GridItem(.flexible()),
+    ], spacing: 16) {
+      ForEach(samples) { sample in
+        SampleCardView(sample: sample)
+      }
+    }
+    .padding()
+  }
+  .background(Color(.systemGroupedBackground))
 }
diff --git a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift b/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift
index d96dbaf75..0ce7cd161 100644
--- a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift
+++ b/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift
@@ -15,7 +15,7 @@
 import Foundation
 import FirebaseAI

-public struct Sample: Identifiable {
+public class Sample: Identifiable {
   public let id = UUID()
   public let title: String
   public let description: String
@@ -43,11 +43,6 @@ public class Sample: Identifiable {
     self.systemInstruction = systemInstruction
     self.tools = tools
   }
-
-  public static func find(by id: UUID?) -> Sample? {
-    guard let id = id else { return nil }
-    return samples.first { $0.id == id }
-  }
 }
diff --git a/firebaseai/ImagenScreen/ImagenScreen.swift b/firebaseai/ImagenScreen/ImagenScreen.swift
index 7b0892a01..a2d2b9fee 100644
--- a/firebaseai/ImagenScreen/ImagenScreen.swift
+++ b/firebaseai/ImagenScreen/ImagenScreen.swift
@@ -23,11 +23,11 @@ struct ImagenScreen: View {
   @State
   private var userPrompt = ""

-  init(firebaseService: FirebaseAI, sampleId: UUID? = nil) {
+  init(firebaseService: FirebaseAI, sample: Sample? = nil) {
     self.firebaseService = firebaseService
     _viewModel =
       StateObject(wrappedValue: ImagenViewModel(firebaseService: firebaseService,
-                                                sampleId: sampleId))
+                                                sample: sample))
   }

   enum FocusedField: Hashable {
@@ -73,14 +73,7 @@ struct ImagenScreen: View {
     .onTapGesture {
       focusedField = nil
     }
-    .toolbar {
-      ToolbarItem(placement: .principal) {
-        Text("Imagen example")
-          .font(.system(size: 24, weight: .bold))
-          .foregroundColor(.primary)
-          .padding(.top, 10)
-      }
-    }
+    .navigationTitle("Imagen example")
     .onAppear {
       focusedField = .message
       if userPrompt.isEmpty && !viewModel.initialPrompt.isEmpty {
diff --git a/firebaseai/ImagenScreen/ImagenViewModel.swift b/firebaseai/ImagenScreen/ImagenViewModel.swift
index ca79ddc00..d4d7f0e6e 100644
--- a/firebaseai/ImagenScreen/ImagenViewModel.swift
+++ b/firebaseai/ImagenScreen/ImagenViewModel.swift
@@ -40,8 +40,8 @@ class ImagenViewModel: ObservableObject {

   private var sample: Sample?

-  init(firebaseService: FirebaseAI, sampleId: UUID? = nil) {
-    sample = Sample.find(by: sampleId)
+  init(firebaseService: FirebaseAI, sample: Sample? = nil) {
+    self.sample = sample

     let modelName = "imagen-3.0-generate-002"
     let safetySettings = ImagenSafetySettings(
       safetyFilterLevel: .blockLowAndAbove

From 0b2a6c9ea6ff25b5f698c29bcc5eec48bf6e1332 Mon Sep 17 00:00:00 2001
From: Haibo Yang
Date: Mon, 4 Aug 2025 09:20:01 -0700
Subject: [PATCH 6/7] change HStack to Label for multi-line titles

---
 .../FirebaseAIExample/Views/SampleCardView.swift  | 10 +++++-----
 .../GenerativeAIUIComponents/Models/Sample.swift  |  2 +-
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/firebaseai/FirebaseAIExample/Views/SampleCardView.swift b/firebaseai/FirebaseAIExample/Views/SampleCardView.swift
index 75f85fb26..139a391b5 100644
--- a/firebaseai/FirebaseAIExample/Views/SampleCardView.swift
+++ b/firebaseai/FirebaseAIExample/Views/SampleCardView.swift
@@ -25,11 +25,11 @@ struct SampleCardView: View {
         .foregroundColor(.secondary)
         .frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .topLeading)
     } label: {
-      HStack {
-        if let useCase = sample.useCases.first {
-          Image(systemName: systemName(for: useCase))
-            .foregroundColor(color(for: useCase))
-        }
+      if let useCase = sample.useCases.first {
+        Label(sample.title, systemImage: systemName(for: useCase))
+          .font(.system(size: 17, weight: .medium))
+          .foregroundColor(color(for: useCase))
+      } else {
         Text(sample.title)
           .font(.system(size: 17, weight: .medium))
       }
diff --git a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift b/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift
index 0ce7cd161..dbb6c5375 100644
--- a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift
+++ b/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift
@@ -15,7 +15,7 @@
 import Foundation
 import FirebaseAI

-public class Sample: Identifiable {
+public struct Sample: Identifiable {
   public let id = UUID()
   public let title: String
   public let description: String

From 21c21192b3ff2e17e4c26e32eb3ae83f85ccbd2c Mon Sep 17 00:00:00 2001
From: Haibo Yang
Date: Mon, 4 Aug 2025 15:58:46 -0700
Subject: [PATCH 7/7] add .inline for navigationBarTitleDisplayMode

---
 .../ChatExample/Screens/ConversationScreen.swift  |  1 +
 .../GenerativeAIUIComponents/Models/Sample.swift  | 14 +++-----------
 2 files changed, 4 insertions(+), 11 deletions(-)

diff --git a/firebaseai/ChatExample/Screens/ConversationScreen.swift b/firebaseai/ChatExample/Screens/ConversationScreen.swift
index c26f76ddb..6c6f7eac6 100644
--- a/firebaseai/ChatExample/Screens/ConversationScreen.swift
+++ b/firebaseai/ChatExample/Screens/ConversationScreen.swift
@@ -90,6 +90,7 @@ struct ConversationScreen: View {
       }
     }
     .navigationTitle(viewModel.title)
+    .navigationBarTitleDisplayMode(.inline)
     .onAppear {
       focusedField = .message
       // Set initial prompt from viewModel if available
diff --git a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift b/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift
index dbb6c5375..3d7637a77 100644
--- a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift
+++ b/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift
@@ -197,19 +197,11 @@ extension Sample {
     ),
     // Function Calling
     Sample(
-      title: "Weather Chat",
-      description: "Use function calling to get the weather conditions" +
-        " for a specific US city on a specific date.",
+      title: "Currency conversion",
+      description: "Use function calling to convert currency",
       useCases: [.functionCalling, .text],
       navRoute: "FunctionCallingScreen",
-      chatHistory: [
-        ModelContent(role: "user", parts: "What's the weather like in New York today?"),
-        ModelContent(
-          role: "model",
-          parts: "I can help you get weather information using function calling. Let me check the current weather conditions for New York."
-        ),
-      ],
-      initialPrompt: ""
+      initialPrompt: "What is 100 Euros in USD?"
     ),
     // Grounding
     Sample(