4 changes: 3 additions & 1 deletion gradle/libs.versions.toml
@@ -6,11 +6,12 @@ coreKtx = "1.15.0"
junit = "4.13.2"
junitVersion = "1.2.1"
espressoCore = "3.6.1"
kotlinxSerializationJson = "1.7.3"
lifecycleRuntimeKtx = "2.8.7"
activityCompose = "1.9.3"
composeBom = "2024.10.01"
reactiveStreams = "1.0.4"
vertexAI = "16.0.0-beta06"
vertexAI = "16.0.1"

[libraries]
androidx-core-ktx = { group = "androidx.core", name = "core-ktx", version.ref = "coreKtx" }
@@ -31,6 +32,7 @@ androidx-material3 = { group = "androidx.compose.material3", name = "material3"

# Vertex AI SDKs in Firebase
firebase-vertex-ai ={ group = "com.google.firebase", name = "firebase-vertexai", version.ref = "vertexAI" }
kotlinx-serialization-json = { module = "org.jetbrains.kotlinx:kotlinx-serialization-json", version.ref = "kotlinxSerializationJson" }
reactive-streams = { module = "org.reactivestreams:reactive-streams", version.ref = "reactiveStreams" }

[plugins]
2 changes: 1 addition & 1 deletion storage/app/build.gradle.kts
@@ -2,7 +2,7 @@ plugins {
id("com.android.application")
id("kotlin-android")
id("com.google.gms.google-services")
id("com.google.devtools.ksp") version "2.0.21-1.0.26"
id("com.google.devtools.ksp") version "2.0.21-1.0.27"
}

android {
1 change: 1 addition & 0 deletions vertexai/app/build.gradle.kts
@@ -59,6 +59,7 @@ dependencies {
implementation(libs.androidx.material3)

implementation(libs.firebase.vertex.ai)
implementation(libs.kotlinx.serialization.json)

// Required for one-shot operations (to use `ListenableFuture` from Reactive Streams)
implementation(libs.reactive.streams)
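The new kotlinx-serialization-json dependency is pulled in because the updated function-calling snippet later in this change exchanges structured data with the model as kotlinx.serialization JsonObject values instead of org.json.JSONObject. A minimal sketch of that kind of payload, reusing the demo figures from the snippet rather than a real API response:

import kotlinx.serialization.json.JsonObject
import kotlinx.serialization.json.JsonPrimitive

// Illustrative only: the shape of function response the updated snippet builds
// with kotlinx.serialization rather than org.json.JSONObject.
fun sampleWeatherResponse(): JsonObject = JsonObject(
    mapOf(
        "temperature" to JsonPrimitive(38),
        "chancePrecipitation" to JsonPrimitive("56%"),
        "cloudConditions" to JsonPrimitive("partlyCloudy")
    )
)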
@@ -169,10 +169,10 @@ void systemInstructionsText() {
/* modelName */ "gemini-1.5-pro-preview-0409",
/* generationConfig (optional) */ null,
/* safetySettings (optional) */ null,
/* requestOptions (optional) */ new RequestOptions(),
/* tools (optional) */ null,
/* toolsConfig (optional) */ null,
/* systemInstruction (optional) */ systemInstruction
/* systemInstruction (optional) */ systemInstruction,
/* requestOptions (optional) */ new RequestOptions()
);
// [END vertexai_si_text]
}
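The Java call above only reorders its positional arguments: requestOptions now comes last, after systemInstruction. For comparison, a rough Kotlin sketch of the same model setup using named parameters, where argument order does not matter; the instruction text is a placeholder and is not taken from this diff:

import com.google.firebase.Firebase
import com.google.firebase.vertexai.type.content
import com.google.firebase.vertexai.vertexAI

// Sketch only: equivalent model creation in Kotlin with named parameters.
val systemInstructedModel = Firebase.vertexAI.generativeModel(
    modelName = "gemini-1.5-pro-preview-0409",
    systemInstruction = content { text("PLACEHOLDER_SYSTEM_INSTRUCTION") }
)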
@@ -5,7 +5,7 @@
import com.google.firebase.vertexai.FirebaseVertexAI;
import com.google.firebase.vertexai.GenerativeModel;
import com.google.firebase.vertexai.java.GenerativeModelFutures;
import com.google.firebase.vertexai.type.BlockThreshold;
import com.google.firebase.vertexai.type.HarmBlockThreshold;
import com.google.firebase.vertexai.type.GenerationConfig;
import com.google.firebase.vertexai.type.HarmCategory;
import com.google.firebase.vertexai.type.RequestOptions;
@@ -39,7 +39,7 @@ void configModelParams() {

void configSafetySettings() {
SafetySetting harassmentSafety1 = new SafetySetting(HarmCategory.HARASSMENT,
BlockThreshold.ONLY_HIGH);
HarmBlockThreshold.ONLY_HIGH, null);

GenerativeModel gm1 = FirebaseVertexAI.Companion.getInstance().generativeModel(
"MODEL_NAME",
@@ -51,10 +51,10 @@ void configSafetySettings() {

// [START vertexai_safety_settings]
SafetySetting harassmentSafety = new SafetySetting(HarmCategory.HARASSMENT,
BlockThreshold.ONLY_HIGH);
HarmBlockThreshold.ONLY_HIGH, null);

SafetySetting hateSpeechSafety = new SafetySetting(HarmCategory.HATE_SPEECH,
BlockThreshold.MEDIUM_AND_ABOVE);
HarmBlockThreshold.MEDIUM_AND_ABOVE, null);

GenerativeModel gm = FirebaseVertexAI.Companion.getInstance().generativeModel(
"MODEL_NAME",
@@ -258,7 +258,7 @@ void generateContentWithVideo(Executor executor, Uri videoUri) {
stream.close();

Content prompt = new Content.Builder()
.addBlob("video/mp4", videoBytes)
.addInlineData(videoBytes, "video/mp4")
.addText("What is in the video?")
.build();

@@ -298,7 +298,7 @@ void generateContentWithVideoStream(
stream.close();

Content prompt = new Content.Builder()
.addBlob("video/mp4", videoBytes)
.addInlineData(videoBytes, "video/mp4")
.addText("What is in the video?")
.build();

@@ -2,7 +2,7 @@ package com.google.firebase.example.vertexai.kotlin

import androidx.lifecycle.ViewModel
import com.google.firebase.Firebase
import com.google.firebase.vertexai.type.BlockThreshold
import com.google.firebase.vertexai.type.HarmBlockThreshold
import com.google.firebase.vertexai.type.HarmCategory
import com.google.firebase.vertexai.type.SafetySetting
import com.google.firebase.vertexai.type.generationConfig
@@ -30,13 +30,13 @@ class ConfigurationViewModel : ViewModel() {
val generativeModel1 = Firebase.vertexAI.generativeModel(
modelName = "MODEL_NAME",
safetySettings = listOf(
SafetySetting(HarmCategory.HARASSMENT, BlockThreshold.ONLY_HIGH)
SafetySetting(HarmCategory.HARASSMENT, HarmBlockThreshold.ONLY_HIGH)
)
)

// [START vertexai_safety_settings]
val harassmentSafety = SafetySetting(HarmCategory.HARASSMENT, BlockThreshold.ONLY_HIGH)
val hateSpeechSafety = SafetySetting(HarmCategory.HATE_SPEECH, BlockThreshold.MEDIUM_AND_ABOVE)
val harassmentSafety = SafetySetting(HarmCategory.HARASSMENT, HarmBlockThreshold.ONLY_HIGH)
val hateSpeechSafety = SafetySetting(HarmCategory.HATE_SPEECH, HarmBlockThreshold.MEDIUM_AND_ABOVE)

val generativeModel = Firebase.vertexAI.generativeModel(
modelName = "MODEL_NAME",
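In both the Java and Kotlin configuration snippets, BlockThreshold is renamed to HarmBlockThreshold, and the Java SafetySetting constructor gains a third argument (passed as null in these snippets). A minimal Kotlin sketch of the renamed type, mirroring the settings used above; "MODEL_NAME" is the same placeholder the snippets use:

import com.google.firebase.Firebase
import com.google.firebase.vertexai.type.HarmBlockThreshold
import com.google.firebase.vertexai.type.HarmCategory
import com.google.firebase.vertexai.type.SafetySetting
import com.google.firebase.vertexai.vertexAI

// Sketch: configuring a model with the renamed HarmBlockThreshold type.
val moderatedModel = Firebase.vertexAI.generativeModel(
    modelName = "MODEL_NAME",
    safetySettings = listOf(
        SafetySetting(HarmCategory.HARASSMENT, HarmBlockThreshold.ONLY_HIGH),
        SafetySetting(HarmCategory.HATE_SPEECH, HarmBlockThreshold.MEDIUM_AND_ABOVE)
    )
)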
@@ -3,90 +3,93 @@ package com.google.firebase.example.vertexai.kotlin
import androidx.lifecycle.ViewModel
import androidx.lifecycle.viewModelScope
import com.google.firebase.Firebase
import com.google.firebase.vertexai.GenerativeModel
import com.google.firebase.vertexai.type.FunctionResponsePart
import com.google.firebase.vertexai.type.InvalidStateException
import com.google.firebase.vertexai.type.FunctionDeclaration
import com.google.firebase.vertexai.type.Schema
import com.google.firebase.vertexai.type.Tool
import com.google.firebase.vertexai.type.content
import com.google.firebase.vertexai.type.defineFunction
import com.google.firebase.vertexai.vertexAI
import kotlinx.coroutines.launch
import org.json.JSONObject
import kotlinx.serialization.json.JsonObject
import kotlinx.serialization.json.JsonPrimitive
import kotlinx.serialization.json.jsonObject
import kotlinx.serialization.json.jsonPrimitive

class FunctionCallViewModel : ViewModel() {

// [START vertexai_fc_create_function]
suspend fun makeApiRequest(
currencyFrom: String,
currencyTo: String
): JSONObject {
// This hypothetical API returns a JSON such as:
// {"base":"USD","rates":{"SEK": 10.99}}
return JSONObject().apply {
put("base", currencyFrom)
put("rates", hashMapOf(currencyTo to 10.99))
}
// This function calls a hypothetical external API that returns
// a collection of weather information for a given location on a given date.
// `location` is an object of the form { city: string, state: string }
data class Location(val city: String, val state: String)

suspend fun fetchWeather(location: Location, date: String): JsonObject {

// TODO(developer): Write a standard function that would call to an external weather API.

// For demo purposes, this hypothetical response is hardcoded here in the expected format.
return JsonObject(mapOf(
"temperature" to JsonPrimitive(38),
"chancePrecipitation" to JsonPrimitive("56%"),
"cloudConditions" to JsonPrimitive("partlyCloudy")
))
}
// [END vertexai_fc_create_function]

// [START vertexai_fc_func_declaration]
val getExchangeRate = defineFunction(
name = "getExchangeRate",
description = "Get the exchange rate for currencies between countries",
Schema.str("currencyFrom", "The currency to convert from."),
Schema.str("currencyTo", "The currency to convert to.")
) { from, to ->
// Call the function that you declared above
makeApiRequest(from, to)
}
val fetchWeatherTool = FunctionDeclaration(
"fetchWeather",
"Get the weather conditions for a specific city on a specific date.",
mapOf(
"location" to Schema.obj(
mapOf(
"city" to Schema.string("The city of the location."),
"state" to Schema.string("The US state of the location."),
),
description = "The name of the city and its state for which " +
"to get the weather. Only cities in the " +
"USA are supported."
),
"date" to Schema.string("The date for which to get the weather." +
" Date must be in the format: YYYY-MM-DD."
),
),
)
// [END vertexai_fc_func_declaration]

// [START vertexai_fc_init]
// Initialize the Vertex AI service and the generative model
// Use a model that supports function calling, like Gemini 1.0 Pro.
val generativeModel = Firebase.vertexAI.generativeModel(
modelName = "gemini-1.0-pro",
// Specify the function declaration.
tools = listOf(Tool(listOf(getExchangeRate)))
// Use a model that supports function calling, like a Gemini 1.5 model
val model = Firebase.vertexAI.generativeModel(
modelName = "gemini-1.5-flash",
// Provide the function declaration to the model.
tools = listOf(Tool.functionDeclarations(listOf(fetchWeatherTool)))
)
// [END vertexai_fc_init]

// [START vertexai_fc_generate]
fun generateFunctionCall() {
viewModelScope.launch {
val chat = generativeModel.startChat()
val prompt = "What was the weather in Boston on October 17, 2024?"
val chat = model.startChat()
// Send the user's question (the prompt) to the model using multi-turn chat.
val result = chat.sendMessage(prompt)

val prompt = "How much is 50 US dollars worth in Swedish krona?"
val functionCalls = result.functionCalls
// When the model responds with one or more function calls, invoke the function(s).
val fetchWeatherCall = functionCalls.find { it.name == "fetchWeather" }

// Send the message to the generative model
var response = chat.sendMessage(prompt)

// Check if the model responded with a function call
response.functionCalls.firstOrNull()?.let { functionCall ->
// Try to retrieve the stored lambda from the model's tools and
// throw an exception if the returned function was not declared
val matchedFunction = generativeModel.tools?.flatMap { it.functionDeclarations }
?.first { it.name == functionCall.name }
?: throw InvalidStateException("Function not found: ${functionCall.name}")

// Call the lambda retrieved above
val apiResponse: JSONObject = matchedFunction.execute(functionCall)

// Send the API response back to the generative model
// so that it generates a text response that can be displayed to the user
response = chat.sendMessage(
content(role = "function") {
part(FunctionResponsePart(functionCall.name, apiResponse))
}
// Forward the structured input data prepared by the model
// to the hypothetical external API.
val functionResponse = fetchWeatherCall?.let {
// Alternatively, if your `Location` class is marked as @Serializable, you can use
// val location = Json.decodeFromJsonElement<Location>(it.args["location"]!!)
val location = Location(
it.args["location"]!!.jsonObject["city"]!!.jsonPrimitive.content,
it.args["location"]!!.jsonObject["state"]!!.jsonPrimitive.content
)
}

// Whenever the model responds with text, show it in the UI
response.text?.let { modelResponse ->
println(modelResponse)
val date = it.args["date"]!!.jsonPrimitive.content
fetchWeather(location, date)
}
}
}
// [END vertexai_fc_generate]
}
}
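The rewritten function-calling snippet parses the model-supplied arguments by hand from the args map of the returned function call, and its inline comment points at a kotlinx.serialization alternative when Location is marked @Serializable. A sketch of that alternative is below. It assumes the Kotlin serialization plugin is applied, and that the elements of response.functionCalls are FunctionCallPart values with a JsonElement-valued args map, as the snippet's use of jsonObject and jsonPrimitive suggests; it also hardcodes the same demo weather values rather than calling a real API. The removed code shows the earlier pattern of returning the result to the model with FunctionResponsePart; the equivalent step with the new types is not part of this diff.

import com.google.firebase.vertexai.type.FunctionCallPart
import kotlinx.serialization.Serializable
import kotlinx.serialization.json.Json
import kotlinx.serialization.json.JsonObject
import kotlinx.serialization.json.JsonPrimitive
import kotlinx.serialization.json.decodeFromJsonElement
import kotlinx.serialization.json.jsonPrimitive

// The snippet's Location class, annotated so kotlinx.serialization can decode it.
@Serializable
data class Location(val city: String, val state: String)

// Sketch: decode the arguments of a "fetchWeather" call with the @Serializable
// approach mentioned in the snippet's comment, then build the same demo response.
fun handleFetchWeatherCall(call: FunctionCallPart): JsonObject {
    val location = Json.decodeFromJsonElement<Location>(call.args["location"]!!)
    val date = call.args["date"]!!.jsonPrimitive.content
    println("Fetching weather for ${location.city}, ${location.state} on $date")
    return JsonObject(
        mapOf(
            "temperature" to JsonPrimitive(38),
            "chancePrecipitation" to JsonPrimitive("56%"),
            "cloudConditions" to JsonPrimitive("partlyCloudy")
        )
    )
}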
@@ -154,7 +154,7 @@ class GenerateContentViewModel : ViewModel() {
val bytes = stream.readBytes()

val prompt = content {
blob("video/mp4", bytes)
inlineData(bytes, "video/mp4")
text("What is in the video?")
}

@@ -181,7 +181,7 @@ class GenerateContentViewModel : ViewModel() {
val bytes = stream.readBytes()

val prompt = content {
blob("video/mp4", bytes)
inlineData(bytes, "video/mp4")
text("What is in the video?")
}
