4 changes: 4 additions & 0 deletions firebase-ai/app/src/main/AndroidManifest.xml
@@ -24,5 +24,9 @@
</intent-filter>
</activity>
</application>
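<!-- RECORD_AUDIO is required for the live audio conversation sample;
INTERNET and ACCESS_NETWORK_STATE support the backend calls. -->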
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />

</manifest>
@@ -270,6 +270,34 @@ val FIREBASE_AI_SAMPLES = listOf(
)
}
),
Sample(
title = "Converse With AI",
description = "Simple app which allows you to talk to AI and" +
" also get information about the weather conditions",
navRoute = "stream",
categories = listOf(Category.LIVE_API, Category.AUDIO, Category.FUNCTION_CALLING),
tools = listOf(
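// Declare a fetchWeather function so the model can request real data
// instead of guessing at weather conditions.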
Tool.functionDeclarations(
listOf(
FunctionDeclaration(
"fetchWeather",
"Get the weather conditions for a specific US city on a specific date.",
mapOf(
"city" to Schema.string("The US city of the location."),
"state" to Schema.string("The US state of the location."),
"date" to Schema.string(
"The date for which to get the weather." +
" Date must be in the format: YYYY-MM-DD."
),
),
)
)
)
),
initialPrompt = content {
text("What was the weather in Boston, MA on October 17, 2024?")
}
),
Sample(
title = "Weather Chat",
description = "Use function calling to get the weather conditions" +
@@ -1,5 +1,7 @@
package com.google.firebase.quickstart.ai

import android.Manifest
import android.content.pm.PackageManager
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.os.Bundle
@@ -19,6 +21,8 @@ import androidx.compose.runtime.mutableStateOf
import androidx.compose.runtime.saveable.rememberSaveable
import androidx.compose.runtime.setValue
import androidx.compose.ui.Modifier
import androidx.core.app.ActivityCompat
import androidx.core.content.ContextCompat
import androidx.navigation.NavController
import androidx.navigation.NavDestination
import androidx.navigation.compose.NavHost
@@ -38,6 +42,10 @@ class MainActivity : ComponentActivity() {
@OptIn(ExperimentalMaterial3Api::class)
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
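// The live conversation sample streams microphone audio, so request
// RECORD_AUDIO up front if it has not already been granted.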
if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
!= PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this, arrayOf(Manifest.permission.RECORD_AUDIO), 1)
}
enableEdgeToEdge()
catImage = BitmapFactory.decodeResource(applicationContext.resources, R.drawable.cat)
setContent {
@@ -0,0 +1,99 @@
package com.google.firebase.quickstart.ai.feature.media.imagen

import android.Manifest
import androidx.annotation.RequiresPermission
import androidx.lifecycle.SavedStateHandle
import androidx.lifecycle.ViewModel
import androidx.navigation.toRoute
import com.google.firebase.Firebase
import com.google.firebase.ai.FirebaseAI
import com.google.firebase.ai.type.FunctionCallPart
import com.google.firebase.ai.type.FunctionResponsePart
import com.google.firebase.ai.type.LiveSession
import com.google.firebase.ai.type.PublicPreviewAPI
import com.google.firebase.ai.type.ResponseModality
import com.google.firebase.ai.type.SpeechConfig
import com.google.firebase.ai.type.Voice
import com.google.firebase.ai.type.liveGenerationConfig
import com.google.firebase.app
import com.google.firebase.quickstart.ai.FIREBASE_AI_SAMPLES
import com.google.firebase.quickstart.ai.feature.live.StreamRealtimeRoute
import com.google.firebase.quickstart.ai.feature.text.functioncalling.WeatherRepository.Companion.fetchWeather
import kotlinx.coroutines.runBlocking
import kotlinx.serialization.json.JsonObject
import kotlinx.serialization.json.jsonPrimitive

@OptIn(PublicPreviewAPI::class)
class BidiViewModel(
savedStateHandle: SavedStateHandle
) : ViewModel() {
private val sampleId = savedStateHandle.toRoute<StreamRealtimeRoute>().sampleId
private val sample = FIREBASE_AI_SAMPLES.first { it.id == sampleId }

// Firebase AI Logic
private var liveSession: LiveSession

init {
val liveGenerationConfig = liveGenerationConfig {
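// Ask for spoken responses using the prebuilt CHARON voice.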
speechConfig = SpeechConfig(voice = Voice("CHARON"))
responseModality = ResponseModality.AUDIO
// Change this to ResponseModality.TEXT if you want text output.
}
val liveModel = FirebaseAI.getInstance(Firebase.app, sample.backend).liveModel(
"gemini-live-2.5-flash-preview",
generationConfig = liveGenerationConfig,
tools = sample.tools
)
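// connect() suspends until the live session is open; blocking here keeps
// the session ready before the UI can start a conversation.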
runBlocking {
liveSession = liveModel.connect()
}
}

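// Called when the model issues a fetchWeather tool call during the live
// conversation; the returned FunctionResponsePart is sent back to the model.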
fun handler(fetchWeatherCall: FunctionCallPart): FunctionResponsePart {
val response: JsonObject
fetchWeatherCall.let {
val city = it.args["city"]!!.jsonPrimitive.content
val state = it.args["state"]!!.jsonPrimitive.content
val date = it.args["date"]!!.jsonPrimitive.content
runBlocking {
response = fetchWeather(city, state, date)
}
}
return FunctionResponsePart("fetchWeather", response, fetchWeatherCall.id)
}
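
// Streams microphone audio to the model and plays back its spoken replies;
// tool calls from the model are routed through handler().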
@RequiresPermission(Manifest.permission.RECORD_AUDIO)
suspend fun startConversation() {
liveSession.startAudioConversation(::handler)
}

fun endConversation() {
liveSession.stopAudioConversation()
}
}
@@ -1,20 +1,146 @@
package com.google.firebase.quickstart.ai.feature.live

import android.Manifest
import androidx.annotation.RequiresPermission
import androidx.compose.animation.animateContentSize
import androidx.compose.foundation.layout.Arrangement
import androidx.compose.foundation.layout.Column
import androidx.compose.foundation.layout.Spacer
import androidx.compose.foundation.layout.fillMaxSize
import androidx.compose.foundation.layout.height
import androidx.compose.foundation.layout.padding
import androidx.compose.foundation.layout.size
import androidx.compose.foundation.shape.CircleShape
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.CallEnd
import androidx.compose.material.icons.filled.Mic
import androidx.compose.material3.Icon
import androidx.compose.material3.IconButton
import androidx.compose.material3.IconButtonDefaults
import androidx.compose.material3.MaterialTheme
import androidx.compose.material3.Surface
import androidx.compose.material3.Text
import androidx.compose.runtime.Composable
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.draw.clip
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.unit.dp
import androidx.compose.ui.unit.sp
import androidx.compose.runtime.remember
import androidx.compose.runtime.mutableStateOf

import androidx.lifecycle.viewmodel.compose.viewModel
import com.google.firebase.quickstart.ai.feature.media.imagen.BidiViewModel
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch
import kotlinx.serialization.Serializable

@Serializable
class StreamRealtimeRoute(val sampleId: String)

@RequiresPermission(Manifest.permission.RECORD_AUDIO)
@Composable
fun StreamRealtimeScreen(bidiView: BidiViewModel = viewModel<BidiViewModel>()) {
val isConversationActive = remember { mutableStateOf(false) }
val backgroundColor = MaterialTheme.colorScheme.background
Surface(
modifier = Modifier.fillMaxSize(),
color = backgroundColor
) {
Column(
modifier = Modifier
.fillMaxSize()
.padding(16.dp),
horizontalAlignment = Alignment.CenterHorizontally,
verticalArrangement = Arrangement.Center
) {
// The content will animate its size when it changes
Column(
horizontalAlignment = Alignment.CenterHorizontally,
modifier = Modifier.animateContentSize()
) {
if (isConversationActive.value) {
// Active state UI
Text(
text = "Conversation Active",
fontSize = 22.sp,
fontWeight = FontWeight.Bold,
color = MaterialTheme.colorScheme.onSurface
)
Spacer(modifier = Modifier.height(8.dp))
Text(
text = "Tap the end button to stop", // Example timer
fontSize = 18.sp,
color = MaterialTheme.colorScheme.onSurfaceVariant
)
} else {
// Idle state UI
Text(
text = "Start Conversation",
fontSize = 22.sp,
fontWeight = FontWeight.Bold,
color = MaterialTheme.colorScheme.onSurface
)
Spacer(modifier = Modifier.height(8.dp))
Text(
text = "Tap the microphone to begin",
fontSize = 18.sp,
color = MaterialTheme.colorScheme.onSurfaceVariant
)
}
}

Spacer(modifier = Modifier.height(80.dp))

// The main call-control button
if (isConversationActive.value) {
// Button to end the conversation
IconButton(
onClick = {
bidiView.endConversation()
isConversationActive.value = false
},
modifier = Modifier
.size(90.dp)
.clip(CircleShape),
colors = IconButtonDefaults.iconButtonColors(
containerColor = Color(0xFFE63946), // A nice red color
contentColor = Color.White
)
) {
Icon(
imageVector = Icons.Default.CallEnd,
contentDescription = "End Conversation",
modifier = Modifier.size(48.dp)
)
}
} else {
// Button to start the conversation
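// startConversation() suspends while the audio stream is set up,
// so launch it off the main thread.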
IconButton(
onClick = {
CoroutineScope(Dispatchers.IO).launch {
bidiView.startConversation()
}
isConversationActive.value = true
},
modifier = Modifier
.size(90.dp)
.clip(CircleShape),
colors = IconButtonDefaults.iconButtonColors(
containerColor = MaterialTheme.colorScheme.primary,
contentColor = Color.White
)
) {
Icon(
imageVector = Icons.Default.Mic,
contentDescription = "Start Conversation",
modifier = Modifier.size(48.dp)
)
}
}
}
}
}
@@ -24,5 +24,6 @@ class WeatherRepository {
)
)
}

}
}
@@ -21,6 +21,7 @@ enum class Category(
AUDIO("Audio"),
DOCUMENT("Document"),
FUNCTION_CALLING("Function calling"),
LIVE_API("Bidi Streaming")
}

@OptIn(PublicPreviewAPI::class)