
Commit af5b172

Format new files using ktfmt
1 parent dba04b3 commit af5b172


3 files changed: +66 −102 lines

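The diffs below are mechanical reformatting only: trailing commas are added to multi-line call sites and parameter lists, and short declarations are collapsed onto one line. The build setup used to run ktfmt is not part of this commit, so the snippet below is only a sketch of one common way to wire it up, via the Spotless Gradle plugin; the plugin version, ktfmt version, and googleStyle() choice are assumptions, not taken from this repository.

// build.gradle.kts -- hypothetical Spotless + ktfmt setup, not taken from this commit
plugins {
  id("com.diffplug.spotless") version "6.25.0" // assumed version
}

spotless {
  kotlin {
    target("**/*.kt")
    // ktfmt's default (Google-style) formatting; the version is illustrative.
    ktfmt("0.46").googleStyle()
  }
}

With something like this in place, ./gradlew spotlessApply rewrites the Kotlin sources and ./gradlew spotlessCheck fails the build on unformatted files; the same result can also be produced by the ktfmt IntelliJ plugin or its standalone command-line jar.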
Lines changed: 17 additions & 25 deletions
@@ -1,8 +1,6 @@
 package com.google.firebase.quickstart.ai.feature.live
 
-import android.Manifest
 import android.graphics.Bitmap
-import androidx.annotation.RequiresPermission
 import androidx.lifecycle.SavedStateHandle
 import androidx.lifecycle.ViewModel
 import androidx.lifecycle.viewModelScope
@@ -20,18 +18,15 @@ import com.google.firebase.ai.type.Voice
 import com.google.firebase.ai.type.liveGenerationConfig
 import com.google.firebase.app
 import com.google.firebase.quickstart.ai.FIREBASE_AI_SAMPLES
-import com.google.firebase.quickstart.ai.feature.live.StreamRealtimeRoute
 import com.google.firebase.quickstart.ai.feature.text.functioncalling.WeatherRepository.Companion.fetchWeather
+import java.io.ByteArrayOutputStream
 import kotlinx.coroutines.launch
 import kotlinx.coroutines.runBlocking
 import kotlinx.serialization.json.JsonObject
 import kotlinx.serialization.json.jsonPrimitive
-import java.io.ByteArrayOutputStream
 
 @OptIn(PublicPreviewAPI::class)
-class BidiViewModel(
-    savedStateHandle: SavedStateHandle
-) : ViewModel() {
+class BidiViewModel(savedStateHandle: SavedStateHandle) : ViewModel() {
   private val sampleId = savedStateHandle.toRoute<StreamRealtimeRoute>().sampleId
   private val sample = FIREBASE_AI_SAMPLES.first { it.id == sampleId }
 
@@ -46,14 +41,14 @@ class BidiViewModel(
     }
 
     @OptIn(PublicPreviewAPI::class)
-    val liveModel = FirebaseAI.getInstance(Firebase.app, sample.backend).liveModel(
-      modelName = sample.modelName ?: "gemini-live-2.5-flash",
-      generationConfig = liveGenerationConfig,
-      tools = sample.tools
-    )
-    runBlocking {
-      liveSession = liveModel.connect()
-    }
+    val liveModel =
+      FirebaseAI.getInstance(Firebase.app, sample.backend)
+        .liveModel(
+          modelName = sample.modelName ?: "gemini-live-2.5-flash",
+          generationConfig = liveGenerationConfig,
+          tools = sample.tools,
+        )
+    runBlocking { liveSession = liveModel.connect() }
   }
 
   fun handler(fetchWeatherCall: FunctionCallPart): FunctionResponsePart {
@@ -63,17 +58,17 @@ class BidiViewModel(
       val state = it.args["state"]?.jsonPrimitive?.content
       val date = it.args["date"]?.jsonPrimitive?.content
       runBlocking {
-        response = if (!city.isNullOrEmpty() and !state.isNullOrEmpty() and date.isNullOrEmpty()) {
-          fetchWeather(city!!, state!!, date!!)
-        } else {
-          JsonObject(emptyMap())
-        }
+        response =
+          if (!city.isNullOrEmpty() and !state.isNullOrEmpty() and date.isNullOrEmpty()) {
+            fetchWeather(city!!, state!!, date!!)
+          } else {
+            JsonObject(emptyMap())
+          }
       }
     }
     return FunctionResponsePart("fetchWeather", response, fetchWeatherCall.id)
   }
 
-
   suspend fun startConversation() {
     liveSession.startAudioConversation(::handler)
   }
@@ -82,17 +77,14 @@ class BidiViewModel(
     liveSession.stopAudioConversation()
   }
 
-
   fun sendVideoFrame(frame: Bitmap) {
     viewModelScope.launch {
       // Directly compress the Bitmap to a ByteArray
       val byteArrayOutputStream = ByteArrayOutputStream()
       frame.compress(Bitmap.CompressFormat.JPEG, 80, byteArrayOutputStream)
       val jpegBytes = byteArrayOutputStream.toByteArray()
 
-      liveSession.sendVideoRealtime(
-        InlineData(jpegBytes, "image/jpeg")
-      )
+      liveSession.sendVideoRealtime(InlineData(jpegBytes, "image/jpeg"))
     }
   }
 }
Lines changed: 31 additions & 35 deletions
@@ -1,12 +1,7 @@
 package com.google.firebase.quickstart.ai.feature.live
 
 import android.annotation.SuppressLint
-import android.content.Context
 import android.graphics.Bitmap
-import android.graphics.BitmapFactory
-import android.graphics.ImageFormat
-import android.graphics.Rect
-import android.graphics.YuvImage
 import androidx.camera.core.CameraSelector
 import androidx.camera.core.ImageAnalysis
 import androidx.camera.core.ImageProxy
@@ -21,13 +16,12 @@ import androidx.compose.ui.platform.LocalLifecycleOwner
 import androidx.compose.ui.viewinterop.AndroidView
 import androidx.core.content.ContextCompat
 import androidx.lifecycle.LifecycleOwner
-import java.io.ByteArrayOutputStream
 
 @Composable
 fun CameraView(
   modifier: Modifier = Modifier,
   cameraSelector: CameraSelector = CameraSelector.DEFAULT_BACK_CAMERA,
-  onFrameCaptured: (Bitmap) -> Unit
+  onFrameCaptured: (Bitmap) -> Unit,
 ) {
   val context = LocalContext.current
   val lifecycleOwner = LocalLifecycleOwner.current
@@ -37,16 +31,19 @@ fun CameraView(
     factory = { ctx ->
       val previewView = PreviewView(ctx)
       val executor = ContextCompat.getMainExecutor(ctx)
-      cameraProviderFuture.addListener({
-        val cameraProvider = cameraProviderFuture.get()
-        bindPreview(
-          lifecycleOwner,
-          previewView,
-          cameraProvider,
-          cameraSelector,
-          onFrameCaptured
-        )
-      }, executor)
+      cameraProviderFuture.addListener(
+        {
+          val cameraProvider = cameraProviderFuture.get()
+          bindPreview(
+            lifecycleOwner,
+            previewView,
+            cameraProvider,
+            cameraSelector,
+            onFrameCaptured,
+          )
+        },
+        executor,
+      )
       previewView
     },
     modifier = modifier,
@@ -58,29 +55,28 @@ private fun bindPreview(
   previewView: PreviewView,
   cameraProvider: ProcessCameraProvider,
   cameraSelector: CameraSelector,
-  onFrameCaptured: (Bitmap) -> Unit
+  onFrameCaptured: (Bitmap) -> Unit,
 ) {
-  val preview = Preview.Builder().build().also {
-    it.setSurfaceProvider(previewView.surfaceProvider)
-  }
+  val preview =
+    Preview.Builder().build().also { it.setSurfaceProvider(previewView.surfaceProvider) }
 
-  val imageAnalysis = ImageAnalysis.Builder()
-    .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
-    .build()
-    .also {
-      it.setAnalyzer(ContextCompat.getMainExecutor(previewView.context), SecondIntervalAnalyzer(onFrameCaptured))
-    }
+  val imageAnalysis =
+    ImageAnalysis.Builder()
+      .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
+      .build()
+      .also {
+        it.setAnalyzer(
+          ContextCompat.getMainExecutor(previewView.context),
+          SecondIntervalAnalyzer(onFrameCaptured),
+        )
+      }
 
   cameraProvider.unbindAll()
-  cameraProvider.bindToLifecycle(
-    lifecycleOwner,
-    cameraSelector,
-    preview,
-    imageAnalysis
-  )
+  cameraProvider.bindToLifecycle(lifecycleOwner, cameraSelector, preview, imageAnalysis)
 }
 
-private class SecondIntervalAnalyzer(private val onFrameCaptured: (Bitmap) -> Unit) : ImageAnalysis.Analyzer {
+private class SecondIntervalAnalyzer(private val onFrameCaptured: (Bitmap) -> Unit) :
+  ImageAnalysis.Analyzer {
   private var lastFrameTimestamp = 0L
   private val interval = 1000L // 1 second
 
@@ -97,4 +93,4 @@ private class SecondIntervalAnalyzer(private val onFrameCaptured: (Bitmap) -> Un
     }
     image.close()
   }
-}
+}
Lines changed: 18 additions & 42 deletions
@@ -1,7 +1,6 @@
 package com.google.firebase.quickstart.ai.feature.live
 
 import android.Manifest
-import android.content.Context
 import android.content.pm.PackageManager
 import androidx.activity.compose.rememberLauncherForActivityResult
 import androidx.activity.result.contract.ActivityResultContracts
@@ -25,82 +24,59 @@ import androidx.compose.ui.Modifier
 import androidx.compose.ui.platform.LocalContext
 import androidx.core.content.ContextCompat
 import androidx.lifecycle.viewmodel.compose.viewModel
-import com.google.firebase.quickstart.ai.feature.live.BidiViewModel
 import kotlinx.coroutines.launch
 import kotlinx.serialization.Serializable
 
-@Serializable
-class StreamRealtimeVideoRoute(val sampleId: String)
+@Serializable class StreamRealtimeVideoRoute(val sampleId: String)
 
 @RequiresPermission(allOf = [Manifest.permission.RECORD_AUDIO, Manifest.permission.CAMERA])
 @Composable
 fun StreamRealtimeVideoScreen(bidiView: BidiViewModel = viewModel<BidiViewModel>()) {
-  val backgroundColor =
-    MaterialTheme.colorScheme.background
+  val backgroundColor = MaterialTheme.colorScheme.background
 
   val scope = rememberCoroutineScope()
 
   val context = LocalContext.current
   var hasPermissions by remember {
     mutableStateOf(
-      ContextCompat.checkSelfPermission(
-        context,
-        Manifest.permission.CAMERA
-      ) == PackageManager.PERMISSION_GRANTED && ContextCompat.checkSelfPermission(
-        context,
-        Manifest.permission.RECORD_AUDIO
-      ) == PackageManager.PERMISSION_GRANTED
+      ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA) ==
+        PackageManager.PERMISSION_GRANTED &&
+        ContextCompat.checkSelfPermission(context, Manifest.permission.RECORD_AUDIO) ==
+          PackageManager.PERMISSION_GRANTED
     )
   }
 
-  val launcher = rememberLauncherForActivityResult(
-    ActivityResultContracts.RequestMultiplePermissions()
-  ) { permissions ->
-    hasPermissions = permissions.values.all { it }
-  }
+  val launcher =
+    rememberLauncherForActivityResult(ActivityResultContracts.RequestMultiplePermissions()) {
+      permissions ->
+      hasPermissions = permissions.values.all { it }
+    }
 
   LaunchedEffect(Unit) {
     if (!hasPermissions) {
-      launcher.launch(
-        arrayOf(
-          Manifest.permission.CAMERA,
-          Manifest.permission.RECORD_AUDIO
-        )
-      )
+      launcher.launch(arrayOf(Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO))
    }
  }
 
  DisposableEffect(hasPermissions) {
    if (hasPermissions) {
-      scope.launch {
-        bidiView.startConversation()
-      }
-    }
-    onDispose {
-      bidiView.endConversation()
+      scope.launch { bidiView.startConversation() }
    }
+    onDispose { bidiView.endConversation() }
  }
 
-  Surface(
-    modifier = Modifier.fillMaxSize(),
-    color = backgroundColor
-  ) {
+  Surface(modifier = Modifier.fillMaxSize(), color = backgroundColor) {
    Column(modifier = Modifier.fillMaxSize()) {
      if (hasPermissions) {
-        Box(
-          modifier = Modifier
-            .fillMaxSize()
-        ) {
+        Box(modifier = Modifier.fillMaxSize()) {
          CameraView(
            modifier = Modifier.fillMaxHeight(0.5f),
-            onFrameCaptured = { bitmap ->
-              bidiView.sendVideoFrame(bitmap)
-            }
+            onFrameCaptured = { bitmap -> bidiView.sendVideoFrame(bitmap) },
          )
        }
      } else {
        Text("Camera and audio permissions are required to use this feature.")
      }
    }
  }
-}
+}
