7 changes: 5 additions & 2 deletions homeassistant/components/alexa_devices/__init__.py
@@ -29,5 +29,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bo

async def async_unload_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool:
"""Unload a config entry."""
await entry.runtime_data.api.close()
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
coordinator = entry.runtime_data
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
await coordinator.api.close()

return unload_ok
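
Why the reorder matters: entities may still call the Amazon API while their platforms are being torn down, so the shared client should be closed only after the platforms have unloaded cleanly, and stay open if unloading fails, since the entry then remains loaded. A minimal sketch of the resulting pattern, assuming only what the hunk above shows (a coordinator stored in entry.runtime_data that exposes the client as .api):

async def async_unload_entry(hass, entry) -> bool:
    """Unload a config entry (sketch of the pattern above)."""
    coordinator = entry.runtime_data  # coordinator stored by async_setup_entry
    # Unload the platforms first; their entities may still use coordinator.api here.
    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
    if unload_ok:
        # Only release the shared client once every platform has let go of it.
        await coordinator.api.close()
    return unload_ok
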
homeassistant/components/google_generative_ai_conversation/__init__.py
@@ -5,6 +5,7 @@
import asyncio
import mimetypes
from pathlib import Path
from types import MappingProxyType

from google.genai import Client
from google.genai.errors import APIError, ClientError
@@ -36,10 +37,13 @@

from .const import (
CONF_PROMPT,
DEFAULT_TITLE,
DEFAULT_TTS_NAME,
DOMAIN,
FILE_POLLING_INTERVAL_SECONDS,
LOGGER,
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_TTS_OPTIONS,
TIMEOUT_MILLIS,
)

@@ -242,6 +246,16 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
parent_entry = api_keys_entries[entry.data[CONF_API_KEY]]

hass.config_entries.async_add_subentry(parent_entry, subentry)
if use_existing:
hass.config_entries.async_add_subentry(
parent_entry,
ConfigSubentry(
data=MappingProxyType(RECOMMENDED_TTS_OPTIONS),
subentry_type="tts",
title=DEFAULT_TTS_NAME,
unique_id=None,
),
)
conversation_entity = entity_registry.async_get_entity_id(
"conversation",
DOMAIN,
@@ -276,6 +290,7 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
else:
hass.config_entries.async_update_entry(
entry,
title=DEFAULT_TITLE,
options={},
version=2,
)
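
The new migration branch wraps RECOMMENDED_TTS_OPTIONS in MappingProxyType because ConfigSubentry stores its data as a read-only mapping; the proxy exposes the recommended defaults without copying them and rejects accidental writes. A standalone sketch of that behaviour, using the literal value RECOMMENDED_TTS_OPTIONS gets in const.py further down:

from types import MappingProxyType

recommended_tts_options = {"recommended": True}  # literal content of RECOMMENDED_TTS_OPTIONS
data = MappingProxyType(recommended_tts_options)

print(data["recommended"])        # reads behave like a normal dict
try:
    data["recommended"] = False   # any write is rejected
except TypeError:
    print("subentry data stays read-only")
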
homeassistant/components/google_generative_ai_conversation/config_flow.py
@@ -47,13 +47,18 @@
CONF_TOP_P,
CONF_USE_GOOGLE_SEARCH_TOOL,
DEFAULT_CONVERSATION_NAME,
DEFAULT_TITLE,
DEFAULT_TTS_NAME,
DOMAIN,
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_CONVERSATION_OPTIONS,
RECOMMENDED_HARM_BLOCK_THRESHOLD,
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_TEMPERATURE,
RECOMMENDED_TOP_K,
RECOMMENDED_TOP_P,
RECOMMENDED_TTS_MODEL,
RECOMMENDED_TTS_OPTIONS,
RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
TIMEOUT_MILLIS,
)
@@ -66,12 +71,6 @@
}
)

RECOMMENDED_OPTIONS = {
CONF_RECOMMENDED: True,
CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
}


async def validate_input(data: dict[str, Any]) -> None:
"""Validate the user input allows us to connect.
@@ -118,15 +117,21 @@ async def async_step_api(
data=user_input,
)
return self.async_create_entry(
title="Google Generative AI",
title=DEFAULT_TITLE,
data=user_input,
subentries=[
{
"subentry_type": "conversation",
"data": RECOMMENDED_OPTIONS,
"data": RECOMMENDED_CONVERSATION_OPTIONS,
"title": DEFAULT_CONVERSATION_NAME,
"unique_id": None,
}
},
{
"subentry_type": "tts",
"data": RECOMMENDED_TTS_OPTIONS,
"title": DEFAULT_TTS_NAME,
"unique_id": None,
},
],
)
return self.async_show_form(
@@ -172,10 +177,13 @@ def async_get_supported_subentry_types(
cls, config_entry: ConfigEntry
) -> dict[str, type[ConfigSubentryFlow]]:
"""Return subentries supported by this integration."""
return {"conversation": ConversationSubentryFlowHandler}
return {
"conversation": LLMSubentryFlowHandler,
"tts": LLMSubentryFlowHandler,
}


class ConversationSubentryFlowHandler(ConfigSubentryFlow):
class LLMSubentryFlowHandler(ConfigSubentryFlow):
"""Flow for managing conversation subentries."""

last_rendered_recommended = False
@@ -202,7 +210,11 @@ async def async_step_set_options(

if user_input is None:
if self._is_new:
options = RECOMMENDED_OPTIONS.copy()
options: dict[str, Any]
if self._subentry_type == "tts":
options = RECOMMENDED_TTS_OPTIONS.copy()
else:
options = RECOMMENDED_CONVERSATION_OPTIONS.copy()
else:
# If this is a reconfiguration, we need to copy the existing options
# so that we can show the current values in the form.
@@ -216,7 +228,7 @@ async def async_step_set_options(
if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
if not user_input.get(CONF_LLM_HASS_API):
user_input.pop(CONF_LLM_HASS_API, None)
# Don't allow to save options that enable the Google Seearch tool with an Assist API
# Don't allow to save options that enable the Google Search tool with an Assist API
if not (
user_input.get(CONF_LLM_HASS_API)
and user_input.get(CONF_USE_GOOGLE_SEARCH_TOOL, False) is True
@@ -240,7 +252,7 @@ async def async_step_set_options(
options = user_input

schema = await google_generative_ai_config_option_schema(
self.hass, self._is_new, options, self._genai_client
self.hass, self._is_new, self._subentry_type, options, self._genai_client
)
return self.async_show_form(
step_id="set_options", data_schema=vol.Schema(schema), errors=errors
@@ -253,6 +265,7 @@ async def async_step_set_options(
async def google_generative_ai_config_option_schema(
hass: HomeAssistant,
is_new: bool,
subentry_type: str,
options: Mapping[str, Any],
genai_client: genai.Client,
) -> dict:
@@ -270,26 +283,39 @@ async def google_generative_ai_config_option_schema(
suggested_llm_apis = [suggested_llm_apis]

if is_new:
if CONF_NAME in options:
default_name = options[CONF_NAME]
elif subentry_type == "tts":
default_name = DEFAULT_TTS_NAME
else:
default_name = DEFAULT_CONVERSATION_NAME
schema: dict[vol.Required | vol.Optional, Any] = {
vol.Required(CONF_NAME, default=DEFAULT_CONVERSATION_NAME): str,
vol.Required(CONF_NAME, default=default_name): str,
}
else:
schema = {}

if subentry_type == "conversation":
schema.update(
{
vol.Optional(
CONF_PROMPT,
description={
"suggested_value": options.get(
CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT
)
},
): TemplateSelector(),
vol.Optional(
CONF_LLM_HASS_API,
description={"suggested_value": suggested_llm_apis},
): SelectSelector(
SelectSelectorConfig(options=hass_apis, multiple=True)
),
}
)
schema.update(
{
vol.Optional(
CONF_PROMPT,
description={
"suggested_value": options.get(
CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT
)
},
): TemplateSelector(),
vol.Optional(
CONF_LLM_HASS_API,
description={"suggested_value": suggested_llm_apis},
): SelectSelector(SelectSelectorConfig(options=hass_apis, multiple=True)),
vol.Required(
CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
): bool,
@@ -310,7 +336,7 @@ async def google_generative_ai_config_option_schema(
if (
api_model.display_name
and api_model.name
and "tts" not in api_model.name
and ("tts" in api_model.name) == (subentry_type == "tts")
and "vision" not in api_model.name
and api_model.supported_actions
and "generateContent" in api_model.supported_actions
@@ -341,12 +367,17 @@
)
)

if subentry_type == "tts":
default_model = RECOMMENDED_TTS_MODEL
else:
default_model = RECOMMENDED_CHAT_MODEL

schema.update(
{
vol.Optional(
CONF_CHAT_MODEL,
description={"suggested_value": options.get(CONF_CHAT_MODEL)},
default=RECOMMENDED_CHAT_MODEL,
default=default_model,
): SelectSelector(
SelectSelectorConfig(mode=SelectSelectorMode.DROPDOWN, options=models)
),
@@ -396,13 +427,18 @@ async def google_generative_ai_config_option_schema(
},
default=RECOMMENDED_HARM_BLOCK_THRESHOLD,
): harm_block_thresholds_selector,
vol.Optional(
CONF_USE_GOOGLE_SEARCH_TOOL,
description={
"suggested_value": options.get(CONF_USE_GOOGLE_SEARCH_TOOL),
},
default=RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
): bool,
}
)
if subentry_type == "conversation":
schema.update(
{
vol.Optional(
CONF_USE_GOOGLE_SEARCH_TOOL,
description={
"suggested_value": options.get(CONF_USE_GOOGLE_SEARCH_TOOL),
},
default=RECOMMENDED_USE_GOOGLE_SEARCH_TOOL,
): bool,
}
)
return schema
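
The reworked model filter, ("tts" in api_model.name) == (subentry_type == "tts"), is the behavioural core of the shared schema: a TTS subentry's model dropdown now lists only TTS-capable models, while a conversation subentry keeps listing the non-TTS ones. A self-contained sketch of that predicate; the sample names are illustrative, not an exhaustive model list:

def keep_model(name: str, subentry_type: str) -> bool:
    # Mirrors the condition above: TTS flows keep only TTS models,
    # conversation flows keep only non-TTS models.
    return ("tts" in name) == (subentry_type == "tts")

sample = ["models/gemini-2.5-flash", "models/gemini-2.5-flash-preview-tts"]
print([m for m in sample if keep_model(m, "conversation")])  # ['models/gemini-2.5-flash']
print([m for m in sample if keep_model(m, "tts")])           # ['models/gemini-2.5-flash-preview-tts']
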
homeassistant/components/google_generative_ai_conversation/const.py
@@ -2,17 +2,21 @@

import logging

from homeassistant.const import CONF_LLM_HASS_API
from homeassistant.helpers import llm

DOMAIN = "google_generative_ai_conversation"
DEFAULT_TITLE = "Google Generative AI"
LOGGER = logging.getLogger(__package__)
CONF_PROMPT = "prompt"

DEFAULT_CONVERSATION_NAME = "Google AI Conversation"
DEFAULT_TTS_NAME = "Google AI TTS"

ATTR_MODEL = "model"
CONF_RECOMMENDED = "recommended"
CONF_CHAT_MODEL = "chat_model"
RECOMMENDED_CHAT_MODEL = "models/gemini-2.5-flash"
RECOMMENDED_TTS_MODEL = "gemini-2.5-flash-preview-tts"
RECOMMENDED_TTS_MODEL = "models/gemini-2.5-flash-preview-tts"
CONF_TEMPERATURE = "temperature"
RECOMMENDED_TEMPERATURE = 1.0
CONF_TOP_P = "top_p"
@@ -31,3 +35,12 @@

TIMEOUT_MILLIS = 10000
FILE_POLLING_INTERVAL_SECONDS = 0.05
RECOMMENDED_CONVERSATION_OPTIONS = {
CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
CONF_RECOMMENDED: True,
}

RECOMMENDED_TTS_OPTIONS = {
CONF_RECOMMENDED: True,
}
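
These two presets are what a brand-new subentry starts from: the config flow copies the matching one into the form (options = RECOMMENDED_TTS_OPTIONS.copy() or RECOMMENDED_CONVERSATION_OPTIONS.copy() in the config_flow.py hunk above). A standalone sketch of that seeding; the literal strings stand in for the CONF_* keys and the llm helper defaults:

# "prompt", "llm_hass_api" and "recommended" stand in for CONF_PROMPT,
# CONF_LLM_HASS_API and CONF_RECOMMENDED; the prompt text is a placeholder.
RECOMMENDED_CONVERSATION_OPTIONS = {
    "prompt": "<default Assist instructions>",
    "llm_hass_api": ["assist"],
    "recommended": True,
}
RECOMMENDED_TTS_OPTIONS = {"recommended": True}

# A new subentry form edits a copy, so the module-level preset is never mutated.
options = RECOMMENDED_CONVERSATION_OPTIONS.copy()
options["recommended"] = False
assert RECOMMENDED_CONVERSATION_OPTIONS["recommended"] is True
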
homeassistant/components/google_generative_ai_conversation/strings.json
@@ -29,7 +29,6 @@
"reconfigure": "Reconfigure conversation agent"
},
"entry_type": "Conversation agent",

"step": {
"set_options": {
"data": {
@@ -61,6 +60,34 @@
"error": {
"invalid_google_search_option": "Google Search can only be enabled if nothing is selected in the \"Control Home Assistant\" setting."
}
},
"tts": {
"initiate_flow": {
"user": "Add Text-to-Speech service",
"reconfigure": "Reconfigure Text-to-Speech service"
},
"entry_type": "Text-to-Speech",
"step": {
"set_options": {
"data": {
"name": "[%key:common::config_flow::data::name%]",
"recommended": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::recommended%]",
"chat_model": "[%key:common::generic::model%]",
"temperature": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::temperature%]",
"top_p": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::top_p%]",
"top_k": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::top_k%]",
"max_tokens": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::max_tokens%]",
"harassment_block_threshold": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::harassment_block_threshold%]",
"hate_block_threshold": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::hate_block_threshold%]",
"sexual_block_threshold": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::sexual_block_threshold%]",
"dangerous_block_threshold": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::step::set_options::data::dangerous_block_threshold%]"
}
}
},
"abort": {
"entry_not_loaded": "[%key:component::google_generative_ai_conversation::config_subentries::conversation::abort::entry_not_loaded%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
}
}
},
"services": {