Merged
3 changes: 3 additions & 0 deletions homeassistant/components/ecowitt/sensor.py
@@ -106,6 +106,7 @@
         native_unit_of_measurement=UnitOfElectricPotential.VOLT,
         state_class=SensorStateClass.MEASUREMENT,
         entity_category=EntityCategory.DIAGNOSTIC,
+        suggested_display_precision=1,
     ),
     EcoWittSensorTypes.CO2_PPM: SensorEntityDescription(
         key="CO2_PPM",
@@ -191,12 +192,14 @@
         device_class=SensorDeviceClass.WIND_SPEED,
         native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=1,
     ),
     EcoWittSensorTypes.SPEED_MPH: SensorEntityDescription(
         key="SPEED_MPH",
         device_class=SensorDeviceClass.WIND_SPEED,
         native_unit_of_measurement=UnitOfSpeed.MILES_PER_HOUR,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=1,
     ),
     EcoWittSensorTypes.PRESSURE_HPA: SensorEntityDescription(
         key="PRESSURE_HPA",
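For context, suggested_display_precision only affects how frontends round the value; the recorded state keeps full precision, so a reading of 1.2345 V is shown as 1.2 V while automations and history still see 1.2345. A minimal sketch of an equivalent description outside this diff (the BATTERY_VOLTAGE key is hypothetical):

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import UnitOfElectricPotential

# Hypothetical sensor description: the state is stored at full precision,
# but dashboards round the displayed value to one decimal place.
BATTERY_VOLTAGE = SensorEntityDescription(
    key="BATTERY_VOLTAGE",
    device_class=SensorDeviceClass.VOLTAGE,
    native_unit_of_measurement=UnitOfElectricPotential.VOLT,
    state_class=SensorStateClass.MEASUREMENT,
    suggested_display_precision=1,
)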
5 changes: 1 addition & 4 deletions homeassistant/components/home_connect/coordinator.py
@@ -655,10 +655,7 @@ def refreshed_too_often_recently(self, appliance_ha_id: str) -> bool:
                     "times": str(MAX_EXECUTIONS),
                     "time_window": str(MAX_EXECUTIONS_TIME_WINDOW // 60),
                     "home_connect_resource_url": "https://www.home-connect.com/global/help-support/error-codes#/Togglebox=15362315-13320636-1/",
-                    "home_assistant_core_new_issue_url": (
-                        "https://github.com/home-assistant/core/issues/new?template=bug_report.yml"
-                        f"&integration_name={DOMAIN}&integration_link=https://www.home-assistant.io/integrations/{DOMAIN}/"
-                    ),
+                    "home_assistant_core_issue_url": "https://github.com/home-assistant/core/issues/147299",
                 },
             )
             return True
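The replaced placeholder feeds the repair text in strings.json below: instead of a prefilled new-issue link, the description now points at the fixed tracking issue 147299. A rough sketch of how a translation placeholder flows into a repair issue; the issue id is hypothetical and only the one placeholder is shown, while the integration's real call passes more arguments:

from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir


def _create_demo_repair(hass: HomeAssistant) -> None:
    """Illustrative sketch: placeholders are substituted into the translated description."""
    ir.async_create_issue(
        hass,
        "home_connect",
        "home_connect_too_many_connected_paired_events_demo",  # hypothetical issue id
        is_fixable=True,
        severity=ir.IssueSeverity.ERROR,
        translation_key="home_connect_too_many_connected_paired_events",
        translation_placeholders={
            # Replaces {home_assistant_core_issue_url} in the description string.
            "home_assistant_core_issue_url": "https://github.com/home-assistant/core/issues/147299",
        },
    )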
2 changes: 1 addition & 1 deletion homeassistant/components/home_connect/strings.json
@@ -130,7 +130,7 @@
         "step": {
           "confirm": {
             "title": "[%key:component::home_connect::issues::home_connect_too_many_connected_paired_events::title%]",
-            "description": "The appliance \"{appliance_name}\" has been reported as connected or paired {times} times in less than {time_window} minutes, so refreshes on connected or paired events has been disabled to avoid exceeding the API rate limit.\n\nPlease refer to the [Home Connect Wi-Fi requirements and recommendations]({home_connect_resource_url}). If everything seems right with your network configuration, restart the appliance.\n\nClick \"submit\" to re-enable the updates.\nIf the issue persists, please create an issue in the [Home Assistant core repository]({home_assistant_core_new_issue_url})."
+            "description": "The appliance \"{appliance_name}\" has been reported as connected or paired {times} times in less than {time_window} minutes, so refreshes on connected or paired events has been disabled to avoid exceeding the API rate limit.\n\nPlease refer to the [Home Connect Wi-Fi requirements and recommendations]({home_connect_resource_url}). If everything seems right with your network configuration, restart the appliance.\n\nClick \"submit\" to re-enable the updates.\nIf the issue persists, please see the following issue in the [Home Assistant core repository]({home_assistant_core_issue_url})."
           }
         }
       }
4 changes: 2 additions & 2 deletions homeassistant/components/openai_conversation/entity.py
@@ -293,6 +293,7 @@ async def _async_handle_chat_log(
             "top_p": options.get(CONF_TOP_P, RECOMMENDED_TOP_P),
             "temperature": options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE),
             "user": chat_log.conversation_id,
+            "store": False,
             "stream": True,
         }
         if tools:
@@ -304,8 +305,7 @@
                     CONF_REASONING_EFFORT, RECOMMENDED_REASONING_EFFORT
                 )
             }
-        else:
-            model_args["store"] = False
+            model_args["include"] = ["reasoning.encrypted_content"]

         try:
             result = await client.responses.create(**model_args)
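Since responses are no longer stored server-side, the request asks the Responses API to return the encrypted reasoning content so it can be replayed on the next turn. A minimal sketch of that request shape against the OpenAI SDK; the model name, prompt, and effort value are placeholders, not taken from this diff:

import asyncio

from openai import AsyncOpenAI


async def stream_once() -> None:
    """Sketch: opt out of storage but keep reasoning state via encrypted content."""
    client = AsyncOpenAI()
    stream = await client.responses.create(
        model="o4-mini",  # assumed reasoning-capable model
        input="Turn on the kitchen lights",
        reasoning={"effort": "low"},
        include=["reasoning.encrypted_content"],  # returned so it can be sent back next turn
        store=False,
        stream=True,
    )
    async for event in stream:
        print(event.type)


if __name__ == "__main__":
    asyncio.run(stream_once())

The test changes below assert exactly that round-trip: the reasoning item kept in the chat log now carries its encrypted_content instead of None.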
2 changes: 1 addition & 1 deletion homeassistant/components/renson/fan.py
@@ -196,7 +196,7 @@ async def async_set_percentage(self, percentage: int) -> None:
             all_data = self.coordinator.data
             breeze_temp = self.api.get_field_value(all_data, BREEZE_TEMPERATURE_FIELD)
             await self.hass.async_add_executor_job(
-                self.api.set_breeze, cmd.name, breeze_temp, True
+                self.api.set_breeze, cmd, breeze_temp, True
             )
         else:
             await self.hass.async_add_executor_job(self.api.set_manual_level, cmd)
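The fix forwards the enum member itself rather than its .name, so the wrapper no longer receives a bare string such as "BREEZE". A generic, hypothetical sketch of the pitfall; the Level members and set_breeze signature below are illustrative and not the renson_endura_delta API:

from enum import Enum


class Level(Enum):
    """Stand-in for the integration's fan level enum."""

    LEVEL1 = 1
    BREEZE = 2


def set_breeze(level: Level, temperature: float, activated: bool) -> None:
    """Hypothetical wrapper that expects the enum member, not its string name."""
    if not isinstance(level, Level):
        raise TypeError(f"expected Level, got {type(level).__name__}")
    print(f"breeze level={level.name} temp={temperature} activated={activated}")


cmd = Level.BREEZE
set_breeze(cmd, 18.0, True)  # what the diff now does: pass the member
# set_breeze(cmd.name, 18.0, True)  # the old call: passes the string "BREEZE" and fails here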
18 changes: 17 additions & 1 deletion tests/components/openai_conversation/conftest.py
@@ -5,7 +5,10 @@

 import pytest

-from homeassistant.components.openai_conversation.const import DEFAULT_CONVERSATION_NAME
+from homeassistant.components.openai_conversation.const import (
+    CONF_CHAT_MODEL,
+    DEFAULT_CONVERSATION_NAME,
+)
 from homeassistant.config_entries import ConfigSubentryData
 from homeassistant.const import CONF_LLM_HASS_API
 from homeassistant.core import HomeAssistant
@@ -59,6 +62,19 @@ def mock_config_entry_with_assist(
     return mock_config_entry


+@pytest.fixture
+def mock_config_entry_with_reasoning_model(
+    hass: HomeAssistant, mock_config_entry: MockConfigEntry
+) -> MockConfigEntry:
+    """Mock a config entry with assist."""
+    hass.config_entries.async_update_subentry(
+        mock_config_entry,
+        next(iter(mock_config_entry.subentries.values())),
+        data={CONF_LLM_HASS_API: llm.LLM_API_ASSIST, CONF_CHAT_MODEL: "o4-mini"},
+    )
+    return mock_config_entry
+
+
 @pytest.fixture
 async def mock_init_component(
     hass: HomeAssistant, mock_config_entry: MockConfigEntry
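The new fixture swaps the subentry's model to o4-mini before setup so tests exercise the reasoning-model branch. A hedged sketch of how a test might consume it; the test name and assertion are illustrative, and the real consumer is test_function_call below:

from homeassistant.components.openai_conversation.const import CONF_CHAT_MODEL

from tests.common import MockConfigEntry


async def test_uses_reasoning_model(
    mock_config_entry_with_reasoning_model: MockConfigEntry,
    mock_init_component,
) -> None:
    """Illustrative only: the fixture has already switched the chat model."""
    subentry = next(iter(mock_config_entry_with_reasoning_model.subentries.values()))
    assert subentry.data[CONF_CHAT_MODEL] == "o4-mini"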
6 changes: 4 additions & 2 deletions tests/components/openai_conversation/test_conversation.py
@@ -499,6 +499,7 @@ def create_reasoning_item(id: str, output_index: int) -> list[ResponseStreamEven
                 summary=[],
                 type="reasoning",
                 status=None,
+                encrypted_content="AAA",
             ),
             output_index=output_index,
             sequence_number=0,
@@ -510,6 +511,7 @@
                 summary=[],
                 type="reasoning",
                 status=None,
+                encrypted_content="AAABBB",
             ),
             output_index=output_index,
             sequence_number=0,
@@ -566,7 +568,7 @@ def create_web_search_item(id: str, output_index: int) -> list[ResponseStreamEve

 async def test_function_call(
     hass: HomeAssistant,
-    mock_config_entry_with_assist: MockConfigEntry,
+    mock_config_entry_with_reasoning_model: MockConfigEntry,
     mock_init_component,
     mock_create_stream: AsyncMock,
     mock_chat_log: MockChatLog,  # noqa: F811
@@ -617,7 +619,7 @@ async def test_function_call(
         "id": "rs_A",
         "summary": [],
         "type": "reasoning",
-        "encrypted_content": None,
+        "encrypted_content": "AAABBB",
     }
     assert result.response.response_type == intent.IntentResponseType.ACTION_DONE
     # Don't test the prompt, as it's not deterministic