diff --git a/firebase-vertexai/CHANGELOG.md b/firebase-vertexai/CHANGELOG.md index 37846fe233e..5619ab98447 100644 --- a/firebase-vertexai/CHANGELOG.md +++ b/firebase-vertexai/CHANGELOG.md @@ -3,6 +3,7 @@ `GenerativeModel` or `ImagenModel`. * [changed] Added new exception type for quota exceeded scenarios. * [feature] `CountTokenRequest` now includes `GenerationConfig` from the model. +* [feature] Added preliminary support for bidirectional streaming. This feature is not yet fully supported. * [changed] **Breaking Change**: `ImagenInlineImage.data` now returns the raw image bytes (in JPEG or PNG format, as specified in `ImagenInlineImage.mimeType`) instead of Base64-encoded data. (#6800) diff --git a/firebase-vertexai/api.txt b/firebase-vertexai/api.txt index 76491378d88..eb7eabdddf2 100644 --- a/firebase-vertexai/api.txt +++ b/firebase-vertexai/api.txt @@ -29,6 +29,11 @@ package com.google.firebase.vertexai { method @com.google.firebase.vertexai.type.PublicPreviewAPI public com.google.firebase.vertexai.ImagenModel imagenModel(String modelName, com.google.firebase.vertexai.type.ImagenGenerationConfig? generationConfig = null); method @com.google.firebase.vertexai.type.PublicPreviewAPI public com.google.firebase.vertexai.ImagenModel imagenModel(String modelName, com.google.firebase.vertexai.type.ImagenGenerationConfig? generationConfig = null, com.google.firebase.vertexai.type.ImagenSafetySettings? safetySettings = null); method @com.google.firebase.vertexai.type.PublicPreviewAPI public com.google.firebase.vertexai.ImagenModel imagenModel(String modelName, com.google.firebase.vertexai.type.ImagenGenerationConfig? generationConfig = null, com.google.firebase.vertexai.type.ImagenSafetySettings? safetySettings = null, com.google.firebase.vertexai.type.RequestOptions requestOptions = com.google.firebase.vertexai.type.RequestOptions()); + method @com.google.firebase.vertexai.type.PublicPreviewAPI public com.google.firebase.vertexai.LiveGenerativeModel liveModel(String modelName); + method @com.google.firebase.vertexai.type.PublicPreviewAPI public com.google.firebase.vertexai.LiveGenerativeModel liveModel(String modelName, com.google.firebase.vertexai.type.LiveGenerationConfig? generationConfig = null); + method @com.google.firebase.vertexai.type.PublicPreviewAPI public com.google.firebase.vertexai.LiveGenerativeModel liveModel(String modelName, com.google.firebase.vertexai.type.LiveGenerationConfig? generationConfig = null, java.util.List? tools = null); + method @com.google.firebase.vertexai.type.PublicPreviewAPI public com.google.firebase.vertexai.LiveGenerativeModel liveModel(String modelName, com.google.firebase.vertexai.type.LiveGenerationConfig? generationConfig = null, java.util.List? tools = null, com.google.firebase.vertexai.type.Content? systemInstruction = null); + method @com.google.firebase.vertexai.type.PublicPreviewAPI public com.google.firebase.vertexai.LiveGenerativeModel liveModel(String modelName, com.google.firebase.vertexai.type.LiveGenerationConfig? generationConfig = null, java.util.List? tools = null, com.google.firebase.vertexai.type.Content? systemInstruction = null, com.google.firebase.vertexai.type.RequestOptions requestOptions = com.google.firebase.vertexai.type.RequestOptions()); property public static final com.google.firebase.vertexai.FirebaseVertexAI instance; field public static final com.google.firebase.vertexai.FirebaseVertexAI.Companion Companion; } @@ -63,6 +68,10 @@ package com.google.firebase.vertexai { method public suspend Object? 
generateImages(String prompt, kotlin.coroutines.Continuation>); } + @com.google.firebase.vertexai.type.PublicPreviewAPI public final class LiveGenerativeModel { + method public suspend Object? connect(kotlin.coroutines.Continuation); + } + } package com.google.firebase.vertexai.java { @@ -105,10 +114,42 @@ package com.google.firebase.vertexai.java { method public com.google.firebase.vertexai.java.ImagenModelFutures from(com.google.firebase.vertexai.ImagenModel model); } + @com.google.firebase.vertexai.type.PublicPreviewAPI public abstract class LiveModelFutures { + method public abstract com.google.common.util.concurrent.ListenableFuture connect(); + method public static final com.google.firebase.vertexai.java.LiveModelFutures from(com.google.firebase.vertexai.LiveGenerativeModel model); + field public static final com.google.firebase.vertexai.java.LiveModelFutures.Companion Companion; + } + + public static final class LiveModelFutures.Companion { + method public com.google.firebase.vertexai.java.LiveModelFutures from(com.google.firebase.vertexai.LiveGenerativeModel model); + } + + @com.google.firebase.vertexai.type.PublicPreviewAPI public abstract class LiveSessionFutures { + method public abstract com.google.common.util.concurrent.ListenableFuture close(); + method public static final com.google.firebase.vertexai.java.LiveSessionFutures from(com.google.firebase.vertexai.type.LiveSession session); + method public abstract org.reactivestreams.Publisher receive(); + method public abstract com.google.common.util.concurrent.ListenableFuture send(com.google.firebase.vertexai.type.Content content); + method public abstract com.google.common.util.concurrent.ListenableFuture send(String text); + method public abstract com.google.common.util.concurrent.ListenableFuture sendFunctionResponse(java.util.List functionList); + method public abstract com.google.common.util.concurrent.ListenableFuture sendMediaStream(java.util.List mediaChunks); + method public abstract com.google.common.util.concurrent.ListenableFuture startAudioConversation(kotlin.jvm.functions.Function1? functionCallHandler); + method public abstract com.google.common.util.concurrent.ListenableFuture stopAudioConversation(); + method public abstract void stopReceiving(); + field public static final com.google.firebase.vertexai.java.LiveSessionFutures.Companion Companion; + } + + public static final class LiveSessionFutures.Companion { + method public com.google.firebase.vertexai.java.LiveSessionFutures from(com.google.firebase.vertexai.type.LiveSession session); + } + } package com.google.firebase.vertexai.type { + public final class AudioRecordInitializationFailedException extends com.google.firebase.vertexai.type.FirebaseVertexAIException { + ctor public AudioRecordInitializationFailedException(String message); + } + public final class BlockReason { method public String getName(); method public int getOrdinal(); @@ -520,6 +561,85 @@ package com.google.firebase.vertexai.type { public final class InvalidStateException extends com.google.firebase.vertexai.type.FirebaseVertexAIException { } + @com.google.firebase.vertexai.type.PublicPreviewAPI public final class LiveContentResponse { + method public com.google.firebase.vertexai.type.Content? getData(); + method public java.util.List? getFunctionCalls(); + method public int getStatus(); + method public String? getText(); + property public final com.google.firebase.vertexai.type.Content? data; + property public final java.util.List? 
functionCalls; + property public final int status; + property public final String? text; + } + + @kotlin.jvm.JvmInline public static final value class LiveContentResponse.Status { + field public static final com.google.firebase.vertexai.type.LiveContentResponse.Status.Companion Companion; + } + + public static final class LiveContentResponse.Status.Companion { + method public int getINTERRUPTED(); + method public int getNORMAL(); + method public int getTURN_COMPLETE(); + property public final int INTERRUPTED; + property public final int NORMAL; + property public final int TURN_COMPLETE; + } + + @com.google.firebase.vertexai.type.PublicPreviewAPI public final class LiveGenerationConfig { + field public static final com.google.firebase.vertexai.type.LiveGenerationConfig.Companion Companion; + } + + public static final class LiveGenerationConfig.Builder { + ctor public LiveGenerationConfig.Builder(); + method public com.google.firebase.vertexai.type.LiveGenerationConfig build(); + method public com.google.firebase.vertexai.type.LiveGenerationConfig.Builder setCandidateCount(Integer? candidateCount); + method public com.google.firebase.vertexai.type.LiveGenerationConfig.Builder setFrequencyPenalty(Float? frequencyPenalty); + method public com.google.firebase.vertexai.type.LiveGenerationConfig.Builder setMaxOutputTokens(Integer? maxOutputTokens); + method public com.google.firebase.vertexai.type.LiveGenerationConfig.Builder setPresencePenalty(Float? presencePenalty); + method public com.google.firebase.vertexai.type.LiveGenerationConfig.Builder setResponseModalities(com.google.firebase.vertexai.type.ResponseModality? responseModalities); + method public com.google.firebase.vertexai.type.LiveGenerationConfig.Builder setSpeechConfig(com.google.firebase.vertexai.type.SpeechConfig? speechConfig); + method public com.google.firebase.vertexai.type.LiveGenerationConfig.Builder setTemperature(Float? temperature); + method public com.google.firebase.vertexai.type.LiveGenerationConfig.Builder setTopK(Integer? topK); + method public com.google.firebase.vertexai.type.LiveGenerationConfig.Builder setTopP(Float? topP); + field public Integer? candidateCount; + field public Float? frequencyPenalty; + field public Integer? maxOutputTokens; + field public Float? presencePenalty; + field public com.google.firebase.vertexai.type.ResponseModality? responseModality; + field public com.google.firebase.vertexai.type.SpeechConfig? speechConfig; + field public Float? temperature; + field public Integer? topK; + field public Float? topP; + } + + public static final class LiveGenerationConfig.Companion { + method public com.google.firebase.vertexai.type.LiveGenerationConfig.Builder builder(); + } + + public final class LiveGenerationConfigKt { + method public static com.google.firebase.vertexai.type.LiveGenerationConfig liveGenerationConfig(kotlin.jvm.functions.Function1 init); + } + + @com.google.firebase.vertexai.type.PublicPreviewAPI public final class LiveSession { + method public suspend Object? close(kotlin.coroutines.Continuation); + method public kotlinx.coroutines.flow.Flow receive(); + method public suspend Object? send(com.google.firebase.vertexai.type.Content content, kotlin.coroutines.Continuation); + method public suspend Object? send(String text, kotlin.coroutines.Continuation); + method public suspend Object? sendFunctionResponse(java.util.List functionList, kotlin.coroutines.Continuation); + method public suspend Object? 
sendMediaStream(java.util.List mediaChunks, kotlin.coroutines.Continuation); + method public suspend Object? startAudioConversation(kotlin.jvm.functions.Function1? functionCallHandler = null, kotlin.coroutines.Continuation); + method public void stopAudioConversation(); + method public void stopReceiving(); + } + + @com.google.firebase.vertexai.type.PublicPreviewAPI public final class MediaData { + ctor public MediaData(byte[] data, String mimeType); + method public byte[] getData(); + method public String getMimeType(); + property public final byte[] data; + property public final String mimeType; + } + public final class ModalityTokenCount { method public operator com.google.firebase.vertexai.type.ContentModality component1(); method public operator int component2(); @@ -568,6 +688,19 @@ package com.google.firebase.vertexai.type { public final class RequestTimeoutException extends com.google.firebase.vertexai.type.FirebaseVertexAIException { } + @com.google.firebase.vertexai.type.PublicPreviewAPI public final class ResponseModality { + method public int getOrdinal(); + property public final int ordinal; + field public static final com.google.firebase.vertexai.type.ResponseModality AUDIO; + field public static final com.google.firebase.vertexai.type.ResponseModality.Companion Companion; + field public static final com.google.firebase.vertexai.type.ResponseModality IMAGE; + field public static final com.google.firebase.vertexai.type.ResponseModality TEXT; + field public static final com.google.firebase.vertexai.type.ResponseModality UNSPECIFIED; + } + + public static final class ResponseModality.Companion { + } + public final class ResponseStoppedException extends com.google.firebase.vertexai.type.FirebaseVertexAIException { method public com.google.firebase.vertexai.type.GenerateContentResponse getResponse(); property public final com.google.firebase.vertexai.type.GenerateContentResponse response; @@ -679,9 +812,23 @@ package com.google.firebase.vertexai.type { public final class ServerException extends com.google.firebase.vertexai.type.FirebaseVertexAIException { } + public final class ServiceConnectionHandshakeFailedException extends com.google.firebase.vertexai.type.FirebaseVertexAIException { + ctor public ServiceConnectionHandshakeFailedException(String message, Throwable? 
cause = null); + } + public final class ServiceDisabledException extends com.google.firebase.vertexai.type.FirebaseVertexAIException { } + public final class SessionAlreadyReceivingException extends com.google.firebase.vertexai.type.FirebaseVertexAIException { + ctor public SessionAlreadyReceivingException(); + } + + @com.google.firebase.vertexai.type.PublicPreviewAPI public final class SpeechConfig { + ctor public SpeechConfig(com.google.firebase.vertexai.type.Voices voice); + method public com.google.firebase.vertexai.type.Voices getVoice(); + property public final com.google.firebase.vertexai.type.Voices voice; + } + public abstract class StringFormat { } @@ -728,5 +875,20 @@ package com.google.firebase.vertexai.type { property public final int totalTokenCount; } + @com.google.firebase.vertexai.type.PublicPreviewAPI public final class Voices { + method public int getOrdinal(); + property public final int ordinal; + field public static final com.google.firebase.vertexai.type.Voices AOEDE; + field public static final com.google.firebase.vertexai.type.Voices CHARON; + field public static final com.google.firebase.vertexai.type.Voices.Companion Companion; + field public static final com.google.firebase.vertexai.type.Voices FENRIR; + field public static final com.google.firebase.vertexai.type.Voices KORE; + field public static final com.google.firebase.vertexai.type.Voices PUCK; + field public static final com.google.firebase.vertexai.type.Voices UNSPECIFIED; + } + + public static final class Voices.Companion { + } + } diff --git a/firebase-vertexai/firebase-vertexai.gradle.kts b/firebase-vertexai/firebase-vertexai.gradle.kts index 6e2e604d26f..f728e905cbb 100644 --- a/firebase-vertexai/firebase-vertexai.gradle.kts +++ b/firebase-vertexai/firebase-vertexai.gradle.kts @@ -63,7 +63,10 @@ android { isReturnDefaultValues = true } } - lint { targetSdk = targetSdkVersion } + lint { + targetSdk = targetSdkVersion + baseline = file("lint-baseline.xml") + } sourceSets { getByName("test").java.srcDirs("src/testUtil") } } @@ -84,6 +87,7 @@ tasks.withType().all { dependencies { implementation(libs.ktor.client.okhttp) implementation(libs.ktor.client.core) + implementation(libs.ktor.client.websockets) implementation(libs.ktor.client.content.negotiation) implementation(libs.ktor.serialization.kotlinx.json) implementation(libs.ktor.client.logging) diff --git a/firebase-vertexai/lint-baseline.xml b/firebase-vertexai/lint-baseline.xml new file mode 100644 index 00000000000..5f6b1f3ebfd --- /dev/null +++ b/firebase-vertexai/lint-baseline.xml @@ -0,0 +1,30 @@ + + + + + + + + + diff --git a/firebase-vertexai/src/main/AndroidManifest.xml b/firebase-vertexai/src/main/AndroidManifest.xml index f61156bd1b5..1a791682f4b 100644 --- a/firebase-vertexai/src/main/AndroidManifest.xml +++ b/firebase-vertexai/src/main/AndroidManifest.xml @@ -20,7 +20,11 @@ - + + + + + , private val internalAuthProvider: Provider, @@ -46,7 +50,7 @@ internal constructor( /** * Instantiates a new [GenerativeModel] given the provided parameters. * - * @param modelName The name of the model to use, for example `"gemini-1.5-pro"`. + * @param modelName The name of the model to use, for example `"gemini-2.0-flash-exp"`. * @param generationConfig The configuration parameters to use for content generation. * @param safetySettings The safety bounds the model will abide to during content generation. * @param tools A list of [Tool]s the model may use to generate content. 
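The new `AudioHelper` introduced later in this change is annotated with `@RequiresPermission(Manifest.permission.RECORD_AUDIO)`, so apps that call `startAudioConversation()` presumably still need to declare and request the microphone permission themselves; a rough, hypothetical pre-flight check in the host app might look like:

```kotlin
import android.Manifest
import android.app.Activity
import android.content.pm.PackageManager
import androidx.core.app.ActivityCompat
import androidx.core.content.ContextCompat

// Hypothetical helper in the host app (not part of this PR): the SDK does not request the
// RECORD_AUDIO permission on the caller's behalf.
fun ensureRecordAudioPermission(activity: Activity, requestCode: Int = 0) {
  val granted =
    ContextCompat.checkSelfPermission(activity, Manifest.permission.RECORD_AUDIO) ==
      PackageManager.PERMISSION_GRANTED
  if (!granted) {
    ActivityCompat.requestPermissions(activity, arrayOf(Manifest.permission.RECORD_AUDIO), requestCode)
  }
}
```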
@@ -93,6 +97,53 @@ internal constructor(
     )
   }
 
+  /**
+   * Instantiates a new [LiveGenerativeModel] given the provided parameters.
+   *
+   * @param modelName The name of the model to use, for example `"gemini-2.0-flash-exp"`.
+   * @param generationConfig The configuration parameters to use for content generation.
+   * @param tools A list of [Tool]s the model may use to generate content.
+   * @param systemInstruction [Content] instructions that direct the model to behave a certain way.
+   * Currently only text content is supported.
+   * @param requestOptions Configuration options for sending requests to the backend.
+   * @return The initialized [LiveGenerativeModel] instance.
+   */
+  @JvmOverloads
+  @PublicPreviewAPI
+  public fun liveModel(
+    modelName: String,
+    generationConfig: LiveGenerationConfig? = null,
+    tools: List<Tool>? = null,
+    systemInstruction: Content? = null,
+    requestOptions: RequestOptions = RequestOptions(),
+  ): LiveGenerativeModel {
+    if (!modelName.startsWith(GEMINI_MODEL_NAME_PREFIX)) {
+      Log.w(
+        TAG,
+        """Unsupported Gemini model "$modelName"; see
+      https://firebase.google.com/docs/vertex-ai/models for a list of supported Gemini model names.
+      """
+          .trimIndent()
+      )
+    }
+    if (location.trim().isEmpty() || location.contains("/")) {
+      throw InvalidLocationException(location)
+    }
+    return LiveGenerativeModel(
+      "projects/${firebaseApp.options.projectId}/locations/${location}/publishers/google/models/${modelName}",
+      firebaseApp.options.apiKey,
+      firebaseApp,
+      backgroundDispatcher,
+      generationConfig,
+      tools,
+      systemInstruction,
+      location,
+      requestOptions,
+      appCheckProvider.get(),
+      internalAuthProvider.get(),
+    )
+  }
+
   /**
    * Instantiates a new [ImagenModel] given the provided parameters.
    *
diff --git a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/FirebaseVertexAIMultiResourceComponent.kt b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/FirebaseVertexAIMultiResourceComponent.kt
index 213351fdc92..1b9cb7a4909 100644
--- a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/FirebaseVertexAIMultiResourceComponent.kt
+++ b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/FirebaseVertexAIMultiResourceComponent.kt
@@ -18,9 +18,11 @@ package com.google.firebase.vertexai
 
 import androidx.annotation.GuardedBy
 import com.google.firebase.FirebaseApp
+import com.google.firebase.annotations.concurrent.Background
 import com.google.firebase.appcheck.interop.InteropAppCheckTokenProvider
 import com.google.firebase.auth.internal.InternalAuthProvider
 import com.google.firebase.inject.Provider
+import kotlin.coroutines.CoroutineContext
 
 /**
  * Multi-resource container for Firebase Vertex AI.
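A minimal sketch of how the `liveModel()` factory above is meant to be consumed, assuming the existing `Firebase.vertexAI` entry point and `content {}` builder from the current SDK; the model name simply follows the `"gemini-2.0-flash-exp"` example in the KDoc:

```kotlin
import com.google.firebase.Firebase
import com.google.firebase.vertexai.LiveGenerativeModel
import com.google.firebase.vertexai.type.PublicPreviewAPI
import com.google.firebase.vertexai.type.ResponseModality
import com.google.firebase.vertexai.type.SpeechConfig
import com.google.firebase.vertexai.type.Voices
import com.google.firebase.vertexai.type.content
import com.google.firebase.vertexai.type.liveGenerationConfig
import com.google.firebase.vertexai.vertexAI

// Builds a LiveGenerativeModel via the liveModel() factory and the liveGenerationConfig DSL
// added in this PR. The whole surface is @PublicPreviewAPI, hence the opt-in.
@OptIn(PublicPreviewAPI::class)
fun buildLiveModel(): LiveGenerativeModel =
  Firebase.vertexAI.liveModel(
    modelName = "gemini-2.0-flash-exp",
    generationConfig = liveGenerationConfig {
      responseModality = ResponseModality.AUDIO // or ResponseModality.TEXT
      speechConfig = SpeechConfig(Voices.AOEDE) // voice used for audio responses
      temperature = 0.7f
    },
    systemInstruction = content { text("Keep answers short.") },
  )
```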
@@ -29,8 +31,9 @@ import com.google.firebase.inject.Provider */ internal class FirebaseVertexAIMultiResourceComponent( private val app: FirebaseApp, + @Background val backgroundDispatcher: CoroutineContext, private val appCheckProvider: Provider, - private val internalAuthProvider: Provider + private val internalAuthProvider: Provider, ) { @GuardedBy("this") private val instances: MutableMap = mutableMapOf() @@ -38,8 +41,13 @@ internal class FirebaseVertexAIMultiResourceComponent( fun get(location: String): FirebaseVertexAI = synchronized(this) { instances[location] - ?: FirebaseVertexAI(app, location, appCheckProvider, internalAuthProvider).also { - instances[location] = it - } + ?: FirebaseVertexAI( + app, + backgroundDispatcher, + location, + appCheckProvider, + internalAuthProvider + ) + .also { instances[location] = it } } } diff --git a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/FirebaseVertexAIRegistrar.kt b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/FirebaseVertexAIRegistrar.kt index fca48ae395a..ff5409567a9 100644 --- a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/FirebaseVertexAIRegistrar.kt +++ b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/FirebaseVertexAIRegistrar.kt @@ -18,13 +18,16 @@ package com.google.firebase.vertexai import androidx.annotation.Keep import com.google.firebase.FirebaseApp +import com.google.firebase.annotations.concurrent.Background import com.google.firebase.appcheck.interop.InteropAppCheckTokenProvider import com.google.firebase.auth.internal.InternalAuthProvider import com.google.firebase.components.Component import com.google.firebase.components.ComponentRegistrar import com.google.firebase.components.Dependency +import com.google.firebase.components.Qualified import com.google.firebase.components.Qualified.unqualified import com.google.firebase.platforminfo.LibraryVersionComponent +import kotlinx.coroutines.CoroutineDispatcher /** * [ComponentRegistrar] for setting up [FirebaseVertexAI] and its internal dependencies. 
@@ -38,11 +41,13 @@ internal class FirebaseVertexAIRegistrar : ComponentRegistrar { Component.builder(FirebaseVertexAIMultiResourceComponent::class.java) .name(LIBRARY_NAME) .add(Dependency.required(firebaseApp)) + .add(Dependency.required(backgroundDispatcher)) .add(Dependency.optionalProvider(appCheckInterop)) .add(Dependency.optionalProvider(internalAuthProvider)) .factory { container -> FirebaseVertexAIMultiResourceComponent( container[firebaseApp], + container.get(backgroundDispatcher), container.getProvider(appCheckInterop), container.getProvider(internalAuthProvider) ) @@ -57,5 +62,7 @@ internal class FirebaseVertexAIRegistrar : ComponentRegistrar { private val firebaseApp = unqualified(FirebaseApp::class.java) private val appCheckInterop = unqualified(InteropAppCheckTokenProvider::class.java) private val internalAuthProvider = unqualified(InternalAuthProvider::class.java) + private val backgroundDispatcher = + Qualified.qualified(Background::class.java, CoroutineDispatcher::class.java) } } diff --git a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/LiveGenerativeModel.kt b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/LiveGenerativeModel.kt new file mode 100644 index 00000000000..e557b694620 --- /dev/null +++ b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/LiveGenerativeModel.kt @@ -0,0 +1,123 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.firebase.vertexai + +import com.google.firebase.FirebaseApp +import com.google.firebase.annotations.concurrent.Background +import com.google.firebase.appcheck.interop.InteropAppCheckTokenProvider +import com.google.firebase.auth.internal.InternalAuthProvider +import com.google.firebase.vertexai.common.APIController +import com.google.firebase.vertexai.common.AppCheckHeaderProvider +import com.google.firebase.vertexai.type.BidiGenerateContentClientMessage +import com.google.firebase.vertexai.type.Content +import com.google.firebase.vertexai.type.LiveGenerationConfig +import com.google.firebase.vertexai.type.LiveSession +import com.google.firebase.vertexai.type.PublicPreviewAPI +import com.google.firebase.vertexai.type.RequestOptions +import com.google.firebase.vertexai.type.ServiceConnectionHandshakeFailedException +import com.google.firebase.vertexai.type.Tool +import io.ktor.websocket.Frame +import io.ktor.websocket.close +import io.ktor.websocket.readBytes +import kotlin.coroutines.CoroutineContext +import kotlinx.coroutines.channels.ClosedReceiveChannelException +import kotlinx.serialization.ExperimentalSerializationApi +import kotlinx.serialization.encodeToString +import kotlinx.serialization.json.Json + +/** + * Represents a multimodal model (like Gemini) capable of real-time content generation based on + * various input types, supporting bidirectional streaming. 
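In practice, the model described above is driven roughly like this (a sketch; error handling elided). `connect()` performs the WebSocket setup handshake and hands back a `LiveSession`:

```kotlin
import com.google.firebase.vertexai.LiveGenerativeModel
import com.google.firebase.vertexai.type.PublicPreviewAPI

// Sketch: open a session, send one client turn, then tear the socket down.
// connect() throws ServiceConnectionHandshakeFailedException if the setup handshake fails.
@OptIn(PublicPreviewAPI::class)
suspend fun openAndGreet(model: LiveGenerativeModel) {
  val session = model.connect()
  try {
    session.send("Hello from the Live API")
  } finally {
    session.close()
  }
}
```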
+ */ +@PublicPreviewAPI +public class LiveGenerativeModel +internal constructor( + private val modelName: String, + @Background private val backgroundDispatcher: CoroutineContext, + private val config: LiveGenerationConfig? = null, + private val tools: List? = null, + private val systemInstruction: Content? = null, + private val location: String, + private val controller: APIController +) { + internal constructor( + modelName: String, + apiKey: String, + firebaseApp: FirebaseApp, + backgroundDispatcher: CoroutineContext, + config: LiveGenerationConfig? = null, + tools: List? = null, + systemInstruction: Content? = null, + location: String = "us-central1", + requestOptions: RequestOptions = RequestOptions(), + appCheckTokenProvider: InteropAppCheckTokenProvider? = null, + internalAuthProvider: InternalAuthProvider? = null, + ) : this( + modelName, + backgroundDispatcher, + config, + tools, + systemInstruction, + location, + APIController( + apiKey, + modelName, + requestOptions, + "gl-kotlin/${KotlinVersion.CURRENT} fire/${BuildConfig.VERSION_NAME}", + firebaseApp, + AppCheckHeaderProvider(TAG, appCheckTokenProvider, internalAuthProvider), + ), + ) + + /** + * Start a [LiveSession] with the server for bidirectional streaming. + * + * @return A [LiveSession] that you can use to stream messages to and from the server. + * @throws [ServiceConnectionHandshakeFailedException] If the client was not able to establish a + * connection with the server. + */ + @OptIn(ExperimentalSerializationApi::class) + public suspend fun connect(): LiveSession { + val clientMessage = + BidiGenerateContentClientMessage( + modelName, + config?.toInternal(), + tools?.map { it.toInternal() }, + systemInstruction?.toInternal() + ) + .toInternal() + val data: String = Json.encodeToString(clientMessage) + try { + val webSession = controller.getWebSocketSession(location) + webSession.send(Frame.Text(data)) + val receivedJson = webSession.incoming.receive().readBytes().toString(Charsets.UTF_8) + // TODO: Try to decode the json instead of string matching. 
+ return if (receivedJson.contains("setupComplete")) { + LiveSession(session = webSession, backgroundDispatcher = backgroundDispatcher) + } else { + webSession.close() + throw ServiceConnectionHandshakeFailedException("Unable to connect to the server") + } + } catch (e: ClosedReceiveChannelException) { + throw ServiceConnectionHandshakeFailedException("Channel was closed by the server", e) + } + } + + private companion object { + private val TAG = LiveGenerativeModel::class.java.simpleName + } +} diff --git a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/common/APIController.kt b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/common/APIController.kt index c67e21ccf23..da580429f8c 100644 --- a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/common/APIController.kt +++ b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/common/APIController.kt @@ -36,6 +36,9 @@ import io.ktor.client.engine.HttpClientEngine import io.ktor.client.engine.okhttp.OkHttp import io.ktor.client.plugins.HttpTimeout import io.ktor.client.plugins.contentnegotiation.ContentNegotiation +import io.ktor.client.plugins.websocket.ClientWebSocketSession +import io.ktor.client.plugins.websocket.WebSockets +import io.ktor.client.plugins.websocket.webSocketSession import io.ktor.client.request.HttpRequestBuilder import io.ktor.client.request.header import io.ktor.client.request.post @@ -126,6 +129,7 @@ internal constructor( socketTimeoutMillis = max(180.seconds.inWholeMilliseconds, requestOptions.timeout.inWholeMilliseconds) } + install(WebSockets) install(ContentNegotiation) { json(JSON) } } @@ -156,6 +160,11 @@ internal constructor( throw FirebaseCommonAIException.from(e) } + private fun getBidiEndpoint(location: String): String = + "wss://firebasevertexai.googleapis.com/ws/google.firebase.vertexai.v1beta.LlmBidiService/BidiGenerateContent/locations/$location?key=$key" + + suspend fun getWebSocketSession(location: String): ClientWebSocketSession = + client.webSocketSession(getBidiEndpoint(location)) fun generateContentStream( request: GenerateContentRequest ): Flow = diff --git a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/java/LiveModelFutures.kt b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/java/LiveModelFutures.kt new file mode 100644 index 00000000000..c167e700a5e --- /dev/null +++ b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/java/LiveModelFutures.kt @@ -0,0 +1,53 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.firebase.vertexai.java + +import androidx.concurrent.futures.SuspendToFutureAdapter +import com.google.common.util.concurrent.ListenableFuture +import com.google.firebase.vertexai.LiveGenerativeModel +import com.google.firebase.vertexai.type.LiveSession +import com.google.firebase.vertexai.type.PublicPreviewAPI +import com.google.firebase.vertexai.type.ServiceConnectionHandshakeFailedException + +/** + * Wrapper class providing Java compatible methods for [LiveGenerativeModel]. + * + * @see [LiveGenerativeModel] + */ +@PublicPreviewAPI +public abstract class LiveModelFutures internal constructor() { + + /** + * Start a [LiveSession] with the server for bidirectional streaming. + * @return A [LiveSession] that you can use to stream messages to and from the server. + * @throws [ServiceConnectionHandshakeFailedException] If the client was not able to establish a + * connection with the server. + */ + public abstract fun connect(): ListenableFuture + + private class FuturesImpl(private val model: LiveGenerativeModel) : LiveModelFutures() { + override fun connect(): ListenableFuture { + return SuspendToFutureAdapter.launchFuture { model.connect() } + } + } + + public companion object { + + /** @return a [LiveModelFutures] created around the provided [LiveGenerativeModel] */ + @JvmStatic public fun from(model: LiveGenerativeModel): LiveModelFutures = FuturesImpl(model) + } +} diff --git a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/java/LiveSessionFutures.kt b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/java/LiveSessionFutures.kt new file mode 100644 index 00000000000..044f83e8cc1 --- /dev/null +++ b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/java/LiveSessionFutures.kt @@ -0,0 +1,138 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.firebase.vertexai.java + +import androidx.concurrent.futures.SuspendToFutureAdapter +import com.google.common.util.concurrent.ListenableFuture +import com.google.firebase.vertexai.type.Content +import com.google.firebase.vertexai.type.FunctionCallPart +import com.google.firebase.vertexai.type.FunctionResponsePart +import com.google.firebase.vertexai.type.LiveContentResponse +import com.google.firebase.vertexai.type.LiveSession +import com.google.firebase.vertexai.type.MediaData +import com.google.firebase.vertexai.type.PublicPreviewAPI +import com.google.firebase.vertexai.type.SessionAlreadyReceivingException +import kotlinx.coroutines.reactive.asPublisher +import org.reactivestreams.Publisher + +/** + * Wrapper class providing Java compatible methods for [LiveSession]. + * + * @see [LiveSession] + */ +@PublicPreviewAPI +public abstract class LiveSessionFutures internal constructor() { + + /** + * Starts an audio conversation with the Gemini server, which can only be stopped using + * [stopAudioConversation]. 
+ * + * @param functionCallHandler A callback function to map function calls from the server to their + * response parts. + */ + public abstract fun startAudioConversation( + functionCallHandler: ((FunctionCallPart) -> FunctionResponsePart)? + ): ListenableFuture + + /** + * Stops the audio conversation with the Gemini Server. + * + * @see [startAudioConversation] + * @see [stopReceiving] + */ + public abstract fun stopAudioConversation(): ListenableFuture + + /** Stop receiving from the server. */ + public abstract fun stopReceiving() + + /** + * Sends the function response from the client to the server. + * + * @param functionList The list of [FunctionResponsePart] instances indicating the function + * response from the client. + */ + public abstract fun sendFunctionResponse( + functionList: List + ): ListenableFuture + + /** + * Streams client data to the server. + * + * @param mediaChunks The list of [MediaData] instances representing the media data to be sent. + */ + public abstract fun sendMediaStream(mediaChunks: List): ListenableFuture + + /** + * Sends [data][Content] to the server. + * + * @param content Client [Content] to be sent to the server. + */ + public abstract fun send(content: Content): ListenableFuture + + /** + * Sends text to the server + * + * @param text Text to be sent to the server. + */ + public abstract fun send(text: String): ListenableFuture + + /** Closes the client session. */ + public abstract fun close(): ListenableFuture + + /** + * Receives responses from the server for both streaming and standard requests. + * + * @return A [Publisher] which will emit [LiveContentResponse] as and when it receives it. + * + * @throws [SessionAlreadyReceivingException] When the session is already receiving. + */ + public abstract fun receive(): Publisher + + private class FuturesImpl(private val session: LiveSession) : LiveSessionFutures() { + + override fun receive(): Publisher = session.receive().asPublisher() + + override fun close(): ListenableFuture = + SuspendToFutureAdapter.launchFuture { session.close() } + + override fun send(text: String) = SuspendToFutureAdapter.launchFuture { session.send(text) } + + override fun send(content: Content) = + SuspendToFutureAdapter.launchFuture { session.send(content) } + + override fun sendFunctionResponse(functionList: List) = + SuspendToFutureAdapter.launchFuture { session.sendFunctionResponse(functionList) } + + override fun sendMediaStream(mediaChunks: List) = + SuspendToFutureAdapter.launchFuture { session.sendMediaStream(mediaChunks) } + + override fun startAudioConversation( + functionCallHandler: ((FunctionCallPart) -> FunctionResponsePart)? 
+ ) = SuspendToFutureAdapter.launchFuture { session.startAudioConversation(functionCallHandler) } + + override fun stopAudioConversation() = + SuspendToFutureAdapter.launchFuture { session.stopAudioConversation() } + + override fun stopReceiving() = session.stopReceiving() + } + + public companion object { + + /** @return a [LiveSessionFutures] created around the provided [LiveSession] */ + @JvmStatic public fun from(session: LiveSession): LiveSessionFutures = FuturesImpl(session) + } +} diff --git a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/AudioHelper.kt b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/AudioHelper.kt new file mode 100644 index 00000000000..35edac88db0 --- /dev/null +++ b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/AudioHelper.kt @@ -0,0 +1,120 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.firebase.vertexai.type + +import android.Manifest +import android.media.AudioFormat +import android.media.AudioManager +import android.media.AudioRecord +import android.media.AudioTrack +import android.media.MediaRecorder +import android.media.audiofx.AcousticEchoCanceler +import androidx.annotation.RequiresPermission +import kotlinx.coroutines.flow.Flow +import kotlinx.coroutines.flow.flow + +@PublicPreviewAPI +internal class AudioHelper { + + private lateinit var audioRecord: AudioRecord + private lateinit var audioTrack: AudioTrack + private var stopRecording: Boolean = false + + internal fun release() { + stopRecording = true + if (::audioRecord.isInitialized) { + audioRecord.stop() + audioRecord.release() + } + if (::audioTrack.isInitialized) { + audioTrack.stop() + audioTrack.release() + } + } + + internal fun setupAudioTrack() { + audioTrack = + AudioTrack( + AudioManager.STREAM_MUSIC, + 24000, + AudioFormat.CHANNEL_OUT_MONO, + AudioFormat.ENCODING_PCM_16BIT, + AudioTrack.getMinBufferSize( + 24000, + AudioFormat.CHANNEL_OUT_MONO, + AudioFormat.ENCODING_PCM_16BIT + ), + AudioTrack.MODE_STREAM + ) + audioTrack.play() + } + + internal fun playAudio(data: ByteArray) { + if (!stopRecording) { + audioTrack.write(data, 0, data.size) + } + } + + @RequiresPermission(Manifest.permission.RECORD_AUDIO) + fun startRecording(): Flow { + + val bufferSize = + AudioRecord.getMinBufferSize( + 16000, + AudioFormat.CHANNEL_IN_MONO, + AudioFormat.ENCODING_PCM_16BIT + ) + if ( + bufferSize == AudioRecord.ERROR || + bufferSize == AudioRecord.ERROR_BAD_VALUE || + bufferSize <= 0 + ) { + throw AudioRecordInitializationFailedException( + "Audio Record buffer size is invalid (${bufferSize})" + ) + } + audioRecord = + AudioRecord( + MediaRecorder.AudioSource.VOICE_COMMUNICATION, + 16000, + AudioFormat.CHANNEL_IN_MONO, + AudioFormat.ENCODING_PCM_16BIT, + bufferSize + ) + if (audioRecord.state != AudioRecord.STATE_INITIALIZED) { + throw AudioRecordInitializationFailedException( + "Audio Record initialization has failed. 
State: ${audioRecord.state}" + ) + } + if (AcousticEchoCanceler.isAvailable()) { + val echoCanceler = AcousticEchoCanceler.create(audioRecord.audioSessionId) + echoCanceler?.enabled = true + } + + audioRecord.startRecording() + + return flow { + val buffer = ByteArray(bufferSize) + while (!stopRecording) { + val bytesRead = audioRecord.read(buffer, 0, buffer.size) + if (bytesRead > 0) { + emit(buffer.copyOf(bytesRead)) + } + } + } + } +} diff --git a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/BidiGenerateContentClientMessage.kt b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/BidiGenerateContentClientMessage.kt new file mode 100644 index 00000000000..5488cb240f5 --- /dev/null +++ b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/BidiGenerateContentClientMessage.kt @@ -0,0 +1,44 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.firebase.vertexai.type + +import kotlinx.serialization.ExperimentalSerializationApi +import kotlinx.serialization.Serializable + +@OptIn(ExperimentalSerializationApi::class) +@PublicPreviewAPI +internal class BidiGenerateContentClientMessage( + val model: String, + val generationConfig: LiveGenerationConfig.Internal?, + val tools: List?, + val systemInstruction: Content.Internal? +) { + + @Serializable + internal class Internal(val setup: BidiGenerateContentSetup) { + @Serializable + internal data class BidiGenerateContentSetup( + val model: String, + val generationConfig: LiveGenerationConfig.Internal?, + val tools: List?, + val systemInstruction: Content.Internal? + ) + } + + fun toInternal() = + Internal(Internal.BidiGenerateContentSetup(model, generationConfig, tools, systemInstruction)) +} diff --git a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/ContentModality.kt b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/ContentModality.kt index dd928f92273..ecd4e74d80a 100644 --- a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/ContentModality.kt +++ b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/ContentModality.kt @@ -46,6 +46,15 @@ public class ContentModality private constructor(public val ordinal: Int) { } } + internal fun toInternal() = + when (this) { + TEXT -> "TEXT" + IMAGE -> "IMAGE" + VIDEO -> "VIDEO" + AUDIO -> "AUDIO" + DOCUMENT -> "DOCUMENT" + else -> "UNSPECIFIED" + } public companion object { /** Unspecified modality. 
*/ @JvmField public val UNSPECIFIED: ContentModality = ContentModality(0) diff --git a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/Exceptions.kt b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/Exceptions.kt index 4a29e5c37ea..f3256bf4c15 100644 --- a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/Exceptions.kt +++ b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/Exceptions.kt @@ -175,6 +175,20 @@ public class QuotaExceededException internal constructor(message: String, cause: Throwable? = null) : FirebaseVertexAIException(message, cause) +/** Streaming session already receiving. */ +public class SessionAlreadyReceivingException : + FirebaseVertexAIException( + "This session is already receiving. Please call stopReceiving() before calling this again." + ) + +/** Audio record initialization failures for audio streaming */ +public class AudioRecordInitializationFailedException(message: String) : + FirebaseVertexAIException(message) + +/** Handshake failed with the server */ +public class ServiceConnectionHandshakeFailedException(message: String, cause: Throwable? = null) : + FirebaseVertexAIException(message, cause) + /** Catch all case for exceptions not explicitly expected. */ public class UnknownException internal constructor(message: String, cause: Throwable? = null) : FirebaseVertexAIException(message, cause) diff --git a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/LiveContentResponse.kt b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/LiveContentResponse.kt new file mode 100644 index 00000000000..96021745d1d --- /dev/null +++ b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/LiveContentResponse.kt @@ -0,0 +1,42 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.firebase.vertexai.type + +/* Represents the response from the server. */ +@PublicPreviewAPI +public class LiveContentResponse +internal constructor( + public val data: Content?, + public val status: Status, + public val functionCalls: List? +) { + /** + * Convenience field representing all the text parts in the response as a single string, if they + * exists. + */ + public val text: String? 
= + data?.parts?.filterIsInstance()?.joinToString(" ") { it.text } + + @JvmInline + public value class Status private constructor(private val value: Int) { + public companion object { + public val NORMAL: Status = Status(0) + public val INTERRUPTED: Status = Status(1) + public val TURN_COMPLETE: Status = Status(2) + } + } +} diff --git a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/LiveGenerationConfig.kt b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/LiveGenerationConfig.kt new file mode 100644 index 00000000000..55e789fd14f --- /dev/null +++ b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/LiveGenerationConfig.kt @@ -0,0 +1,217 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.firebase.vertexai.type + +import kotlinx.serialization.SerialName +import kotlinx.serialization.Serializable + +/** + * Configuration parameters to use for content generation. + * + * @property temperature A parameter controlling the degree of randomness in token selection. A + * temperature of 0 means that the highest probability tokens are always selected. In this case, + * responses for a given prompt are mostly deterministic, but a small amount of variation is still + * possible. + * + * @property topK The `topK` parameter changes how the model selects tokens for output. A `topK` of + * 1 means the selected token is the most probable among all the tokens in the model's vocabulary, + * while a `topK` of 3 means that the next token is selected from among the 3 most probable using + * the `temperature`. For each token selection step, the `topK` tokens with the highest + * probabilities are sampled. Tokens are then further filtered based on `topP` with the final token + * selected using `temperature` sampling. Defaults to 40 if unspecified. + * + * @property topP The `topP` parameter changes how the model selects tokens for output. Tokens are + * selected from the most to least probable until the sum of their probabilities equals the `topP` + * value. For example, if tokens A, B, and C have probabilities of 0.3, 0.2, and 0.1 respectively + * and the topP value is 0.5, then the model will select either A or B as the next token by using + * the `temperature` and exclude C as a candidate. Defaults to 0.95 if unset. + * + * @property candidateCount The maximum number of generated response messages to return. This value + * must be between [1, 8], inclusive. If unset, this will default to 1. + * + * - Note: Only unique candidates are returned. Higher temperatures are more likely to produce + * unique candidates. Setting `temperature` to 0 will always produce exactly one candidate + * regardless of the `candidateCount`. + * + * @property presencePenalty Positive penalties. + * + * @property frequencyPenalty Frequency penalties. + * + * @property maxOutputTokens Specifies the maximum number of tokens that can be generated in the + * response. 
The number of tokens per word varies depending on the language outputted. Defaults to 0 + * (unbounded). + * + * @property responseModality Specifies the format of the data in which the server responds to + * requests + * + * @property speechConfig Specifies the voice configuration of the audio response from the server. + * + * Refer to the + * [Control generated output](https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/control-generated-output) + * guide for more details. + */ +@PublicPreviewAPI +public class LiveGenerationConfig +private constructor( + internal val temperature: Float?, + internal val topK: Int?, + internal val topP: Float?, + internal val candidateCount: Int?, + internal val maxOutputTokens: Int?, + internal val presencePenalty: Float?, + internal val frequencyPenalty: Float?, + internal val responseModality: ResponseModality?, + internal val speechConfig: SpeechConfig? +) { + + /** + * Builder for creating a [LiveGenerationConfig]. + * + * Mainly intended for Java interop. Kotlin consumers should use [liveGenerationConfig] for a more + * idiomatic experience. + * + * @property temperature See [LiveGenerationConfig.temperature]. + * + * @property topK See [LiveGenerationConfig.topK]. + * + * @property topP See [LiveGenerationConfig.topP]. + * + * @property presencePenalty See [LiveGenerationConfig.presencePenalty] + * + * @property frequencyPenalty See [LiveGenerationConfig.frequencyPenalty] + * + * @property candidateCount See [LiveGenerationConfig.candidateCount]. + * + * @property maxOutputTokens See [LiveGenerationConfig.maxOutputTokens]. + * + * @property responseModality See [LiveGenerationConfig.responseModality] + * + * @property speechConfig See [LiveGenerationConfig.speechConfig] + */ + public class Builder { + @JvmField public var temperature: Float? = null + @JvmField public var topK: Int? = null + @JvmField public var topP: Float? = null + @JvmField public var candidateCount: Int? = null + @JvmField public var maxOutputTokens: Int? = null + @JvmField public var presencePenalty: Float? = null + @JvmField public var frequencyPenalty: Float? = null + @JvmField public var responseModality: ResponseModality? = null + @JvmField public var speechConfig: SpeechConfig? = null + + public fun setTemperature(temperature: Float?): Builder = apply { + this.temperature = temperature + } + public fun setTopK(topK: Int?): Builder = apply { this.topK = topK } + public fun setTopP(topP: Float?): Builder = apply { this.topP = topP } + public fun setCandidateCount(candidateCount: Int?): Builder = apply { + this.candidateCount = candidateCount + } + public fun setMaxOutputTokens(maxOutputTokens: Int?): Builder = apply { + this.maxOutputTokens = maxOutputTokens + } + public fun setPresencePenalty(presencePenalty: Float?): Builder = apply { + this.presencePenalty = presencePenalty + } + public fun setFrequencyPenalty(frequencyPenalty: Float?): Builder = apply { + this.frequencyPenalty = frequencyPenalty + } + public fun setResponseModalities(responseModalities: ResponseModality?): Builder = apply { + this.responseModality = responseModalities + } + public fun setSpeechConfig(speechConfig: SpeechConfig?): Builder = apply { + this.speechConfig = speechConfig + } + + /** Create a new [LiveGenerationConfig] with the attached arguments. 
*/ + public fun build(): LiveGenerationConfig = + LiveGenerationConfig( + temperature = temperature, + topK = topK, + topP = topP, + candidateCount = candidateCount, + maxOutputTokens = maxOutputTokens, + presencePenalty = presencePenalty, + frequencyPenalty = frequencyPenalty, + speechConfig = speechConfig, + responseModality = responseModality + ) + } + + internal fun toInternal(): Internal { + return Internal( + temperature = temperature, + topP = topP, + topK = topK, + candidateCount = candidateCount, + maxOutputTokens = maxOutputTokens, + frequencyPenalty = frequencyPenalty, + presencePenalty = presencePenalty, + speechConfig = speechConfig?.toInternal(), + responseModalities = + if (responseModality != null) listOf(responseModality.toInternal()) else null + ) + } + + @Serializable + internal data class Internal( + val temperature: Float?, + @SerialName("top_p") val topP: Float?, + @SerialName("top_k") val topK: Int?, + @SerialName("candidate_count") val candidateCount: Int?, + @SerialName("max_output_tokens") val maxOutputTokens: Int?, + @SerialName("presence_penalty") val presencePenalty: Float? = null, + @SerialName("frequency_penalty") val frequencyPenalty: Float? = null, + @SerialName("speech_config") val speechConfig: SpeechConfig.Internal? = null, + @SerialName("response_modalities") val responseModalities: List? = null + ) + + public companion object { + + /** + * Alternative casing for [LiveGenerationConfig.Builder]: + * ``` + * val config = LiveGenerationConfig.builder() + * ``` + */ + public fun builder(): Builder = Builder() + } +} + +/** + * Helper method to construct a [LiveGenerationConfig] in a DSL-like manner. + * + * Example Usage: + * ``` + * liveGenerationConfig { + * temperature = 0.75f + * topP = 0.5f + * topK = 30 + * candidateCount = 4 + * maxOutputTokens = 300 + * ... + * } + * ``` + */ +@OptIn(PublicPreviewAPI::class) +public fun liveGenerationConfig( + init: LiveGenerationConfig.Builder.() -> Unit +): LiveGenerationConfig { + val builder = LiveGenerationConfig.builder() + builder.init() + return builder.build() +} diff --git a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/LiveSession.kt b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/LiveSession.kt new file mode 100644 index 00000000000..b3bdae1f707 --- /dev/null +++ b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/LiveSession.kt @@ -0,0 +1,358 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.firebase.vertexai.type + +import android.media.AudioFormat +import android.media.AudioTrack +import android.util.Log +import com.google.firebase.annotations.concurrent.Background +import io.ktor.client.plugins.websocket.ClientWebSocketSession +import io.ktor.websocket.Frame +import io.ktor.websocket.close +import io.ktor.websocket.readBytes +import java.util.concurrent.ConcurrentLinkedQueue +import kotlin.coroutines.CoroutineContext +import kotlinx.coroutines.CoroutineScope +import kotlinx.coroutines.cancel +import kotlinx.coroutines.channels.Channel +import kotlinx.coroutines.flow.Flow +import kotlinx.coroutines.flow.flow +import kotlinx.coroutines.flow.receiveAsFlow +import kotlinx.coroutines.launch +import kotlinx.serialization.ExperimentalSerializationApi +import kotlinx.serialization.SerialName +import kotlinx.serialization.Serializable +import kotlinx.serialization.encodeToString +import kotlinx.serialization.json.Json +import kotlinx.serialization.json.JsonNull + +/** Represents a live WebSocket session capable of streaming content to and from the server. */ +@PublicPreviewAPI +@OptIn(ExperimentalSerializationApi::class) +public class LiveSession +internal constructor( + private val session: ClientWebSocketSession?, + @Background private val backgroundDispatcher: CoroutineContext, + private var audioHelper: AudioHelper? = null +) { + + private val audioQueue = ConcurrentLinkedQueue() + private val playBackQueue = ConcurrentLinkedQueue() + private var startedReceiving = false + private var receiveChannel: Channel = Channel() + private var isRecording: Boolean = false + + private companion object { + val TAG = LiveSession::class.java.simpleName + val MIN_BUFFER_SIZE = + AudioTrack.getMinBufferSize( + 24000, + AudioFormat.CHANNEL_OUT_MONO, + AudioFormat.ENCODING_PCM_16BIT + ) + } + + internal class ClientContentSetup(val turns: List, val turnComplete: Boolean) { + @Serializable + internal class Internal(@SerialName("client_content") val clientContent: ClientContent) { + @Serializable + internal data class ClientContent( + val turns: List, + @SerialName("turn_complete") val turnComplete: Boolean + ) + } + + fun toInternal() = Internal(Internal.ClientContent(turns, turnComplete)) + } + + @OptIn(ExperimentalSerializationApi::class) + internal class ToolResponseSetup( + val functionResponses: List + ) { + + @Serializable + internal data class Internal(val toolResponse: ToolResponse) { + @Serializable + internal data class ToolResponse( + val functionResponses: List + ) + } + + fun toInternal() = Internal(Internal.ToolResponse(functionResponses)) + } + + internal class ServerContentSetup(val modelTurn: Content.Internal) { + @Serializable + internal class Internal(@SerialName("serverContent") val serverContent: ServerContent) { + @Serializable + internal data class ServerContent(@SerialName("modelTurn") val modelTurn: Content.Internal) + } + + fun toInternal() = Internal(Internal.ServerContent(modelTurn)) + } + + internal class MediaStreamingSetup(val mediaChunks: List) { + @Serializable + internal class Internal(val realtimeInput: MediaChunks) { + @Serializable internal data class MediaChunks(val mediaChunks: List) + } + fun toInternal() = Internal(Internal.MediaChunks(mediaChunks)) + } + + internal data class ToolCallSetup( + val functionCalls: List + ) { + + @Serializable + internal class Internal(val toolCall: ToolCall) { + + @Serializable + internal data class ToolCall(val functionCalls: List) + } + + fun toInternal(): Internal { + return 
Internal(Internal.ToolCall(functionCalls)) + } + } + + private fun fillRecordedAudioQueue() { + CoroutineScope(backgroundDispatcher).launch { + audioHelper!!.startRecording().collect { + if (!isRecording) { + cancel() + } + audioQueue.add(it) + } + } + } + + private suspend fun sendAudioDataToServer() { + var offset = 0 + val audioBuffer = ByteArray(MIN_BUFFER_SIZE * 2) + while (isRecording) { + val receivedAudio = audioQueue.poll() ?: continue + receivedAudio.copyInto(audioBuffer, offset) + offset += receivedAudio.size + if (offset >= MIN_BUFFER_SIZE) { + sendMediaStream(listOf(MediaData(audioBuffer, "audio/pcm"))) + audioBuffer.fill(0) + offset = 0 + } + } + } + + private fun fillServerResponseAudioQueue( + functionCallsHandler: ((FunctionCallPart) -> FunctionResponsePart)? = null + ) { + CoroutineScope(backgroundDispatcher).launch { + receive().collect { + if (!isRecording) { + cancel() + } + when (it.status) { + LiveContentResponse.Status.INTERRUPTED -> + while (!playBackQueue.isEmpty()) playBackQueue.poll() + LiveContentResponse.Status.NORMAL -> + if (!it.functionCalls.isNullOrEmpty() && functionCallsHandler != null) { + sendFunctionResponse(it.functionCalls.map(functionCallsHandler).toList()) + } else { + val audioData = it.data?.parts?.get(0)?.asInlineDataPartOrNull()?.inlineData + if (audioData != null) { + playBackQueue.add(audioData) + } + } + } + } + } + } + + private fun playServerResponseAudio() { + CoroutineScope(backgroundDispatcher).launch { + while (isRecording) { + val x = playBackQueue.poll() ?: continue + audioHelper?.playAudio(x) + } + } + } + + /** + * Starts an audio conversation with the Gemini server, which can only be stopped using + * [stopAudioConversation]. + * + * @param functionCallHandler A callback function that is invoked whenever a function call is + * received from the model; the [FunctionResponsePart] it returns is sent back to the model. + */ + public suspend fun startAudioConversation( + functionCallHandler: ((FunctionCallPart) -> FunctionResponsePart)? = null + ) { + if (isRecording) { + Log.w(TAG, "startAudioConversation called after the recording has already started.") + return + } + isRecording = true + audioHelper = AudioHelper() + audioHelper!!.setupAudioTrack() + fillRecordedAudioQueue() + CoroutineScope(backgroundDispatcher).launch { sendAudioDataToServer() } + fillServerResponseAudioQueue(functionCallHandler) + playServerResponseAudio() + } + + /** + * Stops the audio conversation with the Gemini server. This should only be called after + * [startAudioConversation] has been called. + */ + public fun stopAudioConversation() { + stopReceiving() + isRecording = false + audioHelper?.let { + while (playBackQueue.isNotEmpty()) playBackQueue.poll() + while (audioQueue.isNotEmpty()) audioQueue.poll() + it.release() + } + audioHelper = null + } + + /** + * Stops receiving from the server. If this function is called during an ongoing audio + * conversation, the server's response will not be received, and no audio will be played. + */ + public fun stopReceiving() { + if (!startedReceiving) { + return + } + receiveChannel.cancel() + receiveChannel = Channel() + startedReceiving = false + } + + /** + * Receives responses from the server for both streaming and standard requests. Call + * [stopReceiving] to stop receiving responses from the server. + * + * @return A [Flow] that emits [LiveContentResponse] objects as they are received from the server. + * + * @throws [SessionAlreadyReceivingException] when the session is already receiving.
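+ *
+ * A minimal collection sketch (illustrative only; `session` is assumed to be the [LiveSession]
+ * returned by `LiveGenerativeModel.connect()`):
+ * ```
+ * session.receive().collect { response ->
+ *   when (response.status) {
+ *     LiveContentResponse.Status.TURN_COMPLETE -> { /* the model finished its turn */ }
+ *     LiveContentResponse.Status.INTERRUPTED -> { /* generation was interrupted */ }
+ *     LiveContentResponse.Status.NORMAL -> response.data?.parts?.forEach { /* handle each part */ }
+ *   }
+ * }
+ * ```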
+ */ + public fun receive(): Flow<LiveContentResponse> { + if (startedReceiving) { + throw SessionAlreadyReceivingException() + } + + val flowReceive = session!!.incoming.receiveAsFlow() + CoroutineScope(backgroundDispatcher).launch { flowReceive.collect { receiveChannel.send(it) } } + return flow { + startedReceiving = true + while (true) { + val message = receiveChannel.receive() + val receivedBytes = (message as Frame.Binary).readBytes() + val receivedJson = receivedBytes.toString(Charsets.UTF_8) + if (receivedJson.contains("interrupted")) { + emit(LiveContentResponse(null, LiveContentResponse.Status.INTERRUPTED, null)) + continue + } + if (receivedJson.contains("turnComplete")) { + emit(LiveContentResponse(null, LiveContentResponse.Status.TURN_COMPLETE, null)) + continue + } + try { + val serverContent = Json.decodeFromString<ServerContentSetup.Internal>(receivedJson) + val data = serverContent.serverContent.modelTurn.toPublic() + if (data.parts[0].asInlineDataPartOrNull()?.mimeType?.equals("audio/pcm") == true) { + emit(LiveContentResponse(data, LiveContentResponse.Status.NORMAL, null)) + } + if (data.parts[0] is TextPart) { + emit(LiveContentResponse(data, LiveContentResponse.Status.NORMAL, null)) + } + continue + } catch (e: Exception) { + Log.i(TAG, "Failed to decode server content: ${e.message}") + } + try { + val functionContent = Json.decodeFromString<ToolCallSetup.Internal>(receivedJson) + emit( + LiveContentResponse( + null, + LiveContentResponse.Status.NORMAL, + functionContent.toolCall.functionCalls.map { + FunctionCallPart(it.name, it.args.orEmpty().mapValues { x -> x.value ?: JsonNull }) + } + ) + ) + continue + } catch (e: Exception) { + Log.w(TAG, "Failed to decode function calling: ${e.message}") + } + } + } + } + + /** + * Sends the function calling responses to the server. + * + * @param functionList The list of [FunctionResponsePart] instances indicating the function + * response from the client. + */ + public suspend fun sendFunctionResponse(functionList: List<FunctionResponsePart>) { + val jsonString = + Json.encodeToString( + ToolResponseSetup(functionList.map { it.toInternalFunctionCall() }).toInternal() + ) + session?.send(Frame.Text(jsonString)) + } + + /** + * Streams client data to the server. Calling this after [startAudioConversation] will play the + * response audio immediately. + * + * @param mediaChunks The list of [MediaData] instances representing the media data to be sent. + */ + public suspend fun sendMediaStream( + mediaChunks: List<MediaData>, + ) { + val jsonString = + Json.encodeToString(MediaStreamingSetup(mediaChunks.map { it.toInternal() }).toInternal()) + session?.send(Frame.Text(jsonString)) + } + + /** + * Sends data to the server. Calling this after [startAudioConversation] will play the response + * audio immediately. + * + * @param content Client [Content] to be sent to the server. + */ + public suspend fun send(content: Content) { + val jsonString = + Json.encodeToString(ClientContentSetup(listOf(content.toInternal()), true).toInternal()) + session?.send(Frame.Text(jsonString)) + } + + /** + * Sends text to the server. Calling this after [startAudioConversation] will play the response + * audio immediately. + * + * @param text Text to be sent to the server. + */ + public suspend fun send(text: String) { + send(Content.Builder().text(text).build()) + } + + /** Closes the client session.
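+ *
+ * Sessions hold an open WebSocket connection, so they should typically be closed once the
+ * conversation is over. A rough lifecycle sketch (names such as `liveModel` are illustrative;
+ * assumed to run inside a coroutine scope, with error handling omitted):
+ * ```
+ * val session = liveModel.connect()
+ * val receiver = launch { session.receive().collect { /* handle each [LiveContentResponse] */ } }
+ * session.send("Why is the sky blue?")
+ * // ...once the exchange is finished:
+ * receiver.cancel()
+ * session.close()
+ * ```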
*/ + public suspend fun close() { + session?.close() + } +} diff --git a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/MediaData.kt b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/MediaData.kt new file mode 100644 index 00000000000..7e58c9cf43c --- /dev/null +++ b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/MediaData.kt @@ -0,0 +1,40 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.firebase.vertexai.type + +import android.util.Base64 +import kotlinx.serialization.Serializable + +/** + * Represents the media data to be sent to the server + * + * @param data Byte array representing the data to be sent. + * @param mimeType an IANA standard MIME type. For supported MIME type values see the + * [Firebase documentation](https://firebase.google.com/docs/vertex-ai/input-file-requirements). + */ +@PublicPreviewAPI +public class MediaData(public val data: ByteArray, public val mimeType: String) { + @Serializable + internal class Internal( + val data: String, + val mimeType: String, + ) + + internal fun toInternal(): Internal { + return Internal(Base64.encodeToString(data, BASE_64_FLAGS), mimeType) + } +} diff --git a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/Part.kt b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/Part.kt index a0a47cf79ee..21d3c0edc6c 100644 --- a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/Part.kt +++ b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/Part.kt @@ -57,11 +57,11 @@ public class ImagePart(public val image: Bitmap) : Part public class InlineDataPart(public val inlineData: ByteArray, public val mimeType: String) : Part { @Serializable - internal data class Internal(@SerialName("inline_data") val inlineData: InlineData) : + internal data class Internal(@SerialName("inlineData") val inlineData: InlineData) : InternalPart { @Serializable - internal data class InlineData(@SerialName("mime_type") val mimeType: String, val data: Base64) + internal data class InlineData(@SerialName("mimeType") val mimeType: String, val data: Base64) } } @@ -95,6 +95,10 @@ public class FunctionResponsePart(public val name: String, public val response: @Serializable internal data class FunctionResponse(val name: String, val response: JsonObject) } + + internal fun toInternalFunctionCall(): Internal.FunctionResponse { + return Internal.FunctionResponse(this.name, this.response) + } } /** diff --git a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/ResponseModality.kt b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/ResponseModality.kt new file mode 100644 index 00000000000..e8fe70db157 --- /dev/null +++ b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/ResponseModality.kt @@ -0,0 +1,66 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use 
this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.firebase.vertexai.type + +import com.google.firebase.vertexai.common.util.FirstOrdinalSerializer +import kotlinx.serialization.KSerializer +import kotlinx.serialization.SerialName +import kotlinx.serialization.Serializable + +/** Modality for bidirectional streaming. */ +@PublicPreviewAPI +public class ResponseModality private constructor(public val ordinal: Int) { + + @Serializable(Internal.Serializer::class) + internal enum class Internal { + @SerialName("MODALITY_UNSPECIFIED") UNSPECIFIED, + TEXT, + IMAGE, + AUDIO; + + internal object Serializer : KSerializer by FirstOrdinalSerializer(Internal::class) + + internal fun toPublic() = + when (this) { + TEXT -> ResponseModality.TEXT + IMAGE -> ResponseModality.IMAGE + AUDIO -> ResponseModality.AUDIO + else -> ResponseModality.UNSPECIFIED + } + } + + internal fun toInternal() = + when (this) { + TEXT -> "TEXT" + IMAGE -> "IMAGE" + AUDIO -> "AUDIO" + else -> "UNSPECIFIED" + } + public companion object { + /** Unspecified modality. */ + @JvmField public val UNSPECIFIED: ResponseModality = ResponseModality(0) + + /** Plain text. */ + @JvmField public val TEXT: ResponseModality = ResponseModality(1) + + /** Image. */ + @JvmField public val IMAGE: ResponseModality = ResponseModality(2) + + /** Audio. */ + @JvmField public val AUDIO: ResponseModality = ResponseModality(4) + } +} diff --git a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/SpeechConfig.kt b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/SpeechConfig.kt new file mode 100644 index 00000000000..c304bb6a60e --- /dev/null +++ b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/SpeechConfig.kt @@ -0,0 +1,37 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.firebase.vertexai.type + +import kotlinx.serialization.SerialName +import kotlinx.serialization.Serializable + +/** Speech configuration class for setting up the voice of the server's response. 
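+ *
+ * A brief configuration sketch (the chosen voice is purely illustrative), using the
+ * [liveGenerationConfig] builder shown earlier:
+ * ```
+ * val config = liveGenerationConfig {
+ *   responseModality = ResponseModality.AUDIO
+ *   speechConfig = SpeechConfig(voice = Voices.CHARON)
+ * }
+ * ```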
 */ +@PublicPreviewAPI +public class SpeechConfig(public val voice: Voices) { + + @Serializable + internal data class Internal(@SerialName("voice_config") val voiceConfig: VoiceConfigInternal) { + @Serializable + internal data class VoiceConfigInternal( + @SerialName("prebuilt_voice_config") val prebuiltVoiceConfig: Voices.Internal, + ) + } + + internal fun toInternal(): Internal { + return Internal(Internal.VoiceConfigInternal(prebuiltVoiceConfig = voice.toInternal())) + } +} diff --git a/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/Voices.kt b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/Voices.kt new file mode 100644 index 00000000000..a9ca6390489 --- /dev/null +++ b/firebase-vertexai/src/main/kotlin/com/google/firebase/vertexai/type/Voices.kt @@ -0,0 +1,69 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.firebase.vertexai.type + +import kotlinx.serialization.SerialName +import kotlinx.serialization.Serializable + +/** Various voices supported by the server. */ +@PublicPreviewAPI +public class Voices private constructor(public val ordinal: Int) { + + @Serializable internal data class Internal(@SerialName("voice_name") val voiceName: String) + + @Serializable + internal enum class InternalEnum { + CHARON, + AOEDE, + FENRIR, + KORE, + PUCK; + internal fun toPublic() = + when (this) { + CHARON -> Voices.CHARON + AOEDE -> Voices.AOEDE + FENRIR -> Voices.FENRIR + KORE -> Voices.KORE + else -> Voices.PUCK + } + } + + internal fun toInternal(): Internal { + return when (this) { + CHARON -> Internal(InternalEnum.CHARON.name) + AOEDE -> Internal(InternalEnum.AOEDE.name) + FENRIR -> Internal(InternalEnum.FENRIR.name) + KORE -> Internal(InternalEnum.KORE.name) + else -> Internal(InternalEnum.PUCK.name) + } + } + + public companion object { + /** Unspecified voice.
*/ + @JvmField public val UNSPECIFIED: Voices = Voices(0) + + @JvmField public val CHARON: Voices = Voices(1) + + @JvmField public val AOEDE: Voices = Voices(2) + + @JvmField public val FENRIR: Voices = Voices(3) + + @JvmField public val KORE: Voices = Voices(4) + + @JvmField public val PUCK: Voices = Voices(5) + } +} diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index 4c94f0378b2..44079b349e8 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -153,6 +153,7 @@ ktor-client-core = { module = "io.ktor:ktor-client-core", version.ref = "ktorVer ktor-client-logging = { module = "io.ktor:ktor-client-logging", version.ref = "ktorVersion" } ktor-client-mock = { module = "io.ktor:ktor-client-mock", version.ref = "ktorVersion" } ktor-client-okhttp = { module = "io.ktor:ktor-client-okhttp", version.ref = "ktorVersion" } +ktor-client-websockets = { module = "io.ktor:ktor-client-websockets", version.ref = "ktorVersion" } ktor-serialization-kotlinx-json = { module = "io.ktor:ktor-serialization-kotlinx-json", version.ref = "ktorVersion" } material = { module = "com.google.android.material:material", version.ref = "material" } maven-resolver-api = { module = "org.apache.maven.resolver:maven-resolver-api", version.ref = "mavenResolverApi" }
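Taken together, the pieces above can be exercised roughly as follows. This is only a sketch: the model name is illustrative, `FirebaseVertexAI.instance` is assumed to already be initialized, and recording audio additionally requires the Android `RECORD_AUDIO` runtime permission.

```kotlin
import com.google.firebase.vertexai.FirebaseVertexAI
import com.google.firebase.vertexai.type.PublicPreviewAPI
import com.google.firebase.vertexai.type.ResponseModality
import com.google.firebase.vertexai.type.SpeechConfig
import com.google.firebase.vertexai.type.Voices
import com.google.firebase.vertexai.type.liveGenerationConfig

@OptIn(PublicPreviewAPI::class)
suspend fun runLiveAudioSketch() {
  // Illustrative model name; any model that supports the Live API would do.
  val liveModel = FirebaseVertexAI.instance.liveModel(
    modelName = "gemini-2.0-flash-exp",
    generationConfig = liveGenerationConfig {
      responseModality = ResponseModality.AUDIO
      speechConfig = SpeechConfig(voice = Voices.AOEDE)
    }
  )

  val session = liveModel.connect()  // opens the underlying WebSocket session
  session.startAudioConversation()   // streams microphone audio up, plays model audio back
  // ...later, once the conversation is over:
  session.stopAudioConversation()
  session.close()
}
```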