diff --git a/homeassistant/bootstrap.py b/homeassistant/bootstrap.py
index 4e49d6cec7ee01..24268f4f4e24a3 100644
--- a/homeassistant/bootstrap.py
+++ b/homeassistant/bootstrap.py
@@ -616,34 +616,44 @@ async def async_enable_logging(
         ),
     )
 
-    # Log errors to a file if we have write access to file or config dir
+    logger = logging.getLogger()
+    logger.setLevel(logging.INFO if verbose else logging.WARNING)
+
     if log_file is None:
-        err_log_path = hass.config.path(ERROR_LOG_FILENAME)
+        default_log_path = hass.config.path(ERROR_LOG_FILENAME)
+        if "SUPERVISOR" in os.environ:
+            _LOGGER.info("Running in Supervisor, not logging to file")
+            # Rename the default log file if it exists, since previous versions created
+            # it even on Supervisor
+            if os.path.isfile(default_log_path):
+                with contextlib.suppress(OSError):
+                    os.rename(default_log_path, f"{default_log_path}.old")
+            err_log_path = None
+        else:
+            err_log_path = default_log_path
     else:
         err_log_path = os.path.abspath(log_file)
 
-    err_path_exists = os.path.isfile(err_log_path)
-    err_dir = os.path.dirname(err_log_path)
-
-    # Check if we can write to the error log if it exists or that
-    # we can create files in the containing directory if not.
-    if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
-        not err_path_exists and os.access(err_dir, os.W_OK)
-    ):
-        err_handler = await hass.async_add_executor_job(
-            _create_log_file, err_log_path, log_rotate_days
-        )
+    if err_log_path:
+        err_path_exists = os.path.isfile(err_log_path)
+        err_dir = os.path.dirname(err_log_path)
 
-        err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
+        # Check if we can write to the error log if it exists or that
+        # we can create files in the containing directory if not.
+        if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
+            not err_path_exists and os.access(err_dir, os.W_OK)
+        ):
+            err_handler = await hass.async_add_executor_job(
+                _create_log_file, err_log_path, log_rotate_days
+            )
 
-        logger = logging.getLogger()
-        logger.addHandler(err_handler)
-        logger.setLevel(logging.INFO if verbose else logging.WARNING)
+            err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
+            logger.addHandler(err_handler)
 
-        # Save the log file location for access by other components.
-        hass.data[DATA_LOGGING] = err_log_path
-    else:
-        _LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)
+            # Save the log file location for access by other components.
+            hass.data[DATA_LOGGING] = err_log_path
+        else:
+            _LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)
 
     async_activate_log_queue_handler(hass)
 
diff --git a/homeassistant/components/analytics/analytics.py b/homeassistant/components/analytics/analytics.py
index 2b67592e2f9224..6a2943ccd89739 100644
--- a/homeassistant/components/analytics/analytics.py
+++ b/homeassistant/components/analytics/analytics.py
@@ -505,7 +505,7 @@ def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:
 DEFAULT_ENTITY_ANALYTICS_CONFIG = EntityAnalyticsModifications()
 
 
-async def async_devices_payload(hass: HomeAssistant) -> dict:
+async def async_devices_payload(hass: HomeAssistant) -> dict:  # noqa: C901
     """Return detailed information about entities and devices."""
     dev_reg = dr.async_get(hass)
     ent_reg = er.async_get(hass)
@@ -513,6 +513,8 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
     integration_inputs: dict[str, tuple[list[str], list[str]]] = {}
     integration_configs: dict[str, AnalyticsModifications] = {}
 
+    removed_devices: set[str] = set()
+
     # Get device list
     for device_entry in dev_reg.devices.values():
         if not device_entry.primary_config_entry:
@@ -525,6 +527,10 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
         if config_entry is None:
             continue
 
+        if device_entry.entry_type is dr.DeviceEntryType.SERVICE:
+            removed_devices.add(device_entry.id)
+            continue
+
         integration_domain = config_entry.domain
 
         integration_input = integration_inputs.setdefault(integration_domain, ([], []))
@@ -614,11 +620,12 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
             device_config = integration_config.devices.get(device_id, device_config)
 
             if device_config.remove:
+                removed_devices.add(device_id)
                 continue
 
             device_entry = dev_reg.devices[device_id]
 
-            device_id_mapping[device_entry.id] = (integration_domain, len(devices_info))
+            device_id_mapping[device_id] = (integration_domain, len(devices_info))
 
             devices_info.append(
                 {
@@ -669,7 +676,7 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
 
             entity_entry = ent_reg.entities[entity_id]
 
-            entity_state = hass.states.get(entity_entry.entity_id)
+            entity_state = hass.states.get(entity_id)
 
             entity_info = {
                 # LIMITATION: `assumed_state` can be overridden by users;
@@ -690,15 +697,19 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
                 "unit_of_measurement": entity_entry.unit_of_measurement,
             }
 
-            if (
-                ((device_id_ := entity_entry.device_id) is not None)
-                and ((new_device_id := device_id_mapping.get(device_id_)) is not None)
-                and (new_device_id[0] == integration_domain)
-            ):
-                device_info = devices_info[new_device_id[1]]
-                device_info["entities"].append(entity_info)
-            else:
-                entities_info.append(entity_info)
+            if (device_id_ := entity_entry.device_id) is not None:
+                if device_id_ in removed_devices:
+                    # The device was removed, so we remove the entity too
+                    continue
+
+                if (
+                    new_device_id := device_id_mapping.get(device_id_)
+                ) is not None and (new_device_id[0] == integration_domain):
+                    device_info = devices_info[new_device_id[1]]
+                    device_info["entities"].append(entity_info)
+                    continue
+
+            entities_info.append(entity_info)
 
     return {
         "version": "home-assistant:1",
diff --git a/homeassistant/components/bayesian/binary_sensor.py b/homeassistant/components/bayesian/binary_sensor.py
index d09e55de77db31..6d3dbb7f244825 100644
--- a/homeassistant/components/bayesian/binary_sensor.py
+++ b/homeassistant/components/bayesian/binary_sensor.py
@@ -272,6 +272,13 @@ async def async_setup_entry(
     observations: list[ConfigType] = [
         dict(subentry.data) for subentry in config_entry.subentries.values()
     ]
+
+    for observation in observations:
+        if observation[CONF_PLATFORM] == CONF_TEMPLATE:
+            observation[CONF_VALUE_TEMPLATE] = Template(
+                observation[CONF_VALUE_TEMPLATE], hass
+            )
+
     prior: float = config[CONF_PRIOR]
     probability_threshold: float = config[CONF_PROBABILITY_THRESHOLD]
     device_class: BinarySensorDeviceClass | None = config.get(CONF_DEVICE_CLASS)
diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json
index b3bc9b8c067e52..040f6c3a863772 100644
--- a/homeassistant/components/conversation/manifest.json
+++ b/homeassistant/components/conversation/manifest.json
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/conversation",
   "integration_type": "entity",
   "quality_scale": "internal",
-  "requirements": ["hassil==3.2.0", "home-assistant-intents==2025.9.24"]
+  "requirements": ["hassil==3.2.0", "home-assistant-intents==2025.10.1"]
 }
diff --git a/homeassistant/components/esphome/analytics.py b/homeassistant/components/esphome/analytics.py
new file mode 100644
index 00000000000000..d801bfeb31fed7
--- /dev/null
+++ b/homeassistant/components/esphome/analytics.py
@@ -0,0 +1,11 @@
+"""Analytics platform."""
+
+from homeassistant.components.analytics import AnalyticsInput, AnalyticsModifications
+from homeassistant.core import HomeAssistant
+
+
+async def async_modify_analytics(
+    hass: HomeAssistant, analytics_input: AnalyticsInput
+) -> AnalyticsModifications:
+    """Modify the analytics."""
+    return AnalyticsModifications(remove=True)
diff --git a/homeassistant/components/firefly_iii/manifest.json b/homeassistant/components/firefly_iii/manifest.json
index 18f9f794331558..59aea7c3c2fc69 100644
--- a/homeassistant/components/firefly_iii/manifest.json
+++ b/homeassistant/components/firefly_iii/manifest.json
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/firefly_iii",
   "iot_class": "local_polling",
   "quality_scale": "bronze",
-  "requirements": ["pyfirefly==0.1.5"]
+  "requirements": ["pyfirefly==0.1.6"]
 }
diff --git a/homeassistant/components/homeassistant_connect_zbt2/config_flow.py b/homeassistant/components/homeassistant_connect_zbt2/config_flow.py
index 49243e5a97dfba..34af7b6168a1a1 100644
--- a/homeassistant/components/homeassistant_connect_zbt2/config_flow.py
+++ b/homeassistant/components/homeassistant_connect_zbt2/config_flow.py
@@ -10,6 +10,7 @@
 from homeassistant.components.homeassistant_hardware.util import (
     ApplicationType,
     FirmwareInfo,
+    ResetTarget,
 )
 from homeassistant.config_entries import (
     ConfigEntry,
@@ -67,6 +68,11 @@ class ZBT2FirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
 
     context: ConfigFlowContext
 
+    # `rts_dtr` targets older adapters, `baudrate` works for newer ones. The reason we
+    # try them in this order is that on older adapters `baudrate` entered the ESP32-S3
+    # bootloader instead of the MG24 bootloader.
+ BOOTLOADER_RESET_METHODS = [ResetTarget.RTS_DTR, ResetTarget.BAUDRATE] + async def async_step_install_zigbee_firmware( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/homeassistant_connect_zbt2/update.py b/homeassistant/components/homeassistant_connect_zbt2/update.py index 24ddf417180420..6c8819a7da9659 100644 --- a/homeassistant/components/homeassistant_connect_zbt2/update.py +++ b/homeassistant/components/homeassistant_connect_zbt2/update.py @@ -16,6 +16,7 @@ from homeassistant.components.homeassistant_hardware.util import ( ApplicationType, FirmwareInfo, + ResetTarget, ) from homeassistant.components.update import UpdateDeviceClass from homeassistant.config_entries import ConfigEntry @@ -156,7 +157,7 @@ async def async_setup_entry( class FirmwareUpdateEntity(BaseFirmwareUpdateEntity): """Connect ZBT-2 firmware update entity.""" - bootloader_reset_type = None + bootloader_reset_methods = [ResetTarget.RTS_DTR, ResetTarget.BAUDRATE] def __init__( self, diff --git a/homeassistant/components/homeassistant_hardware/firmware_config_flow.py b/homeassistant/components/homeassistant_hardware/firmware_config_flow.py index 20b817fe2c50f2..284e7611f2f588 100644 --- a/homeassistant/components/homeassistant_hardware/firmware_config_flow.py +++ b/homeassistant/components/homeassistant_hardware/firmware_config_flow.py @@ -39,6 +39,7 @@ FirmwareInfo, OwningAddon, OwningIntegration, + ResetTarget, async_flash_silabs_firmware, get_otbr_addon_manager, guess_firmware_info, @@ -79,6 +80,8 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC): """Base flow to install firmware.""" ZIGBEE_BAUDRATE = 115200 # Default, subclasses may override + BOOTLOADER_RESET_METHODS: list[ResetTarget] = [] # Default, subclasses may override + _picked_firmware_type: PickedFirmwareType _zigbee_flow_strategy: ZigbeeFlowStrategy = ZigbeeFlowStrategy.RECOMMENDED @@ -274,7 +277,7 @@ async def _install_firmware( device=self._device, fw_data=fw_data, expected_installed_firmware_type=expected_installed_firmware_type, - bootloader_reset_type=None, + bootloader_reset_methods=self.BOOTLOADER_RESET_METHODS, progress_callback=lambda offset, total: self.async_update_progress( offset / total ), diff --git a/homeassistant/components/homeassistant_hardware/manifest.json b/homeassistant/components/homeassistant_hardware/manifest.json index 26d227ae922d86..510c1fc6d6cf8a 100644 --- a/homeassistant/components/homeassistant_hardware/manifest.json +++ b/homeassistant/components/homeassistant_hardware/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/homeassistant_hardware", "integration_type": "system", "requirements": [ - "universal-silabs-flasher==0.0.32", + "universal-silabs-flasher==0.0.34", "ha-silabs-firmware-client==0.2.0" ] } diff --git a/homeassistant/components/homeassistant_hardware/update.py b/homeassistant/components/homeassistant_hardware/update.py index 831d9f3f4da78c..81c02360bd26a6 100644 --- a/homeassistant/components/homeassistant_hardware/update.py +++ b/homeassistant/components/homeassistant_hardware/update.py @@ -22,7 +22,12 @@ from .coordinator import FirmwareUpdateCoordinator from .helpers import async_register_firmware_info_callback -from .util import ApplicationType, FirmwareInfo, async_flash_silabs_firmware +from .util import ( + ApplicationType, + FirmwareInfo, + ResetTarget, + async_flash_silabs_firmware, +) _LOGGER = logging.getLogger(__name__) @@ -81,7 +86,7 @@ class BaseFirmwareUpdateEntity( # Subclasses provide 
the mapping between firmware types and entity descriptions entity_description: FirmwareUpdateEntityDescription - bootloader_reset_type: str | None = None + bootloader_reset_methods: list[ResetTarget] = [] _attr_supported_features = ( UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS @@ -268,7 +273,7 @@ async def async_install( device=self._current_device, fw_data=fw_data, expected_installed_firmware_type=self.entity_description.expected_firmware_type, - bootloader_reset_type=self.bootloader_reset_type, + bootloader_reset_methods=self.bootloader_reset_methods, progress_callback=self._update_progress, ) finally: diff --git a/homeassistant/components/homeassistant_hardware/util.py b/homeassistant/components/homeassistant_hardware/util.py index d3bddad97545a3..278cc19151664d 100644 --- a/homeassistant/components/homeassistant_hardware/util.py +++ b/homeassistant/components/homeassistant_hardware/util.py @@ -4,13 +4,16 @@ import asyncio from collections import defaultdict -from collections.abc import AsyncIterator, Callable, Iterable +from collections.abc import AsyncIterator, Callable, Iterable, Sequence from contextlib import AsyncExitStack, asynccontextmanager from dataclasses import dataclass from enum import StrEnum import logging -from universal_silabs_flasher.const import ApplicationType as FlasherApplicationType +from universal_silabs_flasher.const import ( + ApplicationType as FlasherApplicationType, + ResetTarget as FlasherResetTarget, +) from universal_silabs_flasher.firmware import parse_firmware_image from universal_silabs_flasher.flasher import Flasher @@ -59,6 +62,18 @@ def as_flasher_application_type(self) -> FlasherApplicationType: return FlasherApplicationType(self.value) +class ResetTarget(StrEnum): + """Methods to reset a device into bootloader mode.""" + + RTS_DTR = "rts_dtr" + BAUDRATE = "baudrate" + YELLOW = "yellow" + + def as_flasher_reset_target(self) -> FlasherResetTarget: + """Convert the reset target enum into one compatible with USF.""" + return FlasherResetTarget(self.value) + + @singleton(OTBR_ADDON_MANAGER_DATA) @callback def get_otbr_addon_manager(hass: HomeAssistant) -> WaitingAddonManager: @@ -342,7 +357,7 @@ async def async_flash_silabs_firmware( device: str, fw_data: bytes, expected_installed_firmware_type: ApplicationType, - bootloader_reset_type: str | None = None, + bootloader_reset_methods: Sequence[ResetTarget] = (), progress_callback: Callable[[int, int], None] | None = None, ) -> FirmwareInfo: """Flash firmware to the SiLabs device.""" @@ -359,7 +374,9 @@ async def async_flash_silabs_firmware( ApplicationType.SPINEL.as_flasher_application_type(), ApplicationType.CPC.as_flasher_application_type(), ), - bootloader_reset=bootloader_reset_type, + bootloader_reset=tuple( + m.as_flasher_reset_target() for m in bootloader_reset_methods + ), ) async with AsyncExitStack() as stack: diff --git a/homeassistant/components/homeassistant_sky_connect/update.py b/homeassistant/components/homeassistant_sky_connect/update.py index df69b6d40a23f2..eab9fc232a43cd 100644 --- a/homeassistant/components/homeassistant_sky_connect/update.py +++ b/homeassistant/components/homeassistant_sky_connect/update.py @@ -168,7 +168,8 @@ async def async_setup_entry( class FirmwareUpdateEntity(BaseFirmwareUpdateEntity): """SkyConnect firmware update entity.""" - bootloader_reset_type = None + # The ZBT-1 does not have a hardware bootloader trigger + bootloader_reset_methods = [] def __init__( self, diff --git a/homeassistant/components/homeassistant_yellow/config_flow.py 
b/homeassistant/components/homeassistant_yellow/config_flow.py index 8339a3562b33a7..821ba48eee76b3 100644 --- a/homeassistant/components/homeassistant_yellow/config_flow.py +++ b/homeassistant/components/homeassistant_yellow/config_flow.py @@ -27,6 +27,7 @@ from homeassistant.components.homeassistant_hardware.util import ( ApplicationType, FirmwareInfo, + ResetTarget, probe_silabs_firmware_info, ) from homeassistant.config_entries import ( @@ -83,6 +84,8 @@ async def _install_firmware_step( class YellowFirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol): """Mixin for Home Assistant Yellow firmware methods.""" + BOOTLOADER_RESET_METHODS = [ResetTarget.YELLOW] + async def async_step_install_zigbee_firmware( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/homeassistant_yellow/update.py b/homeassistant/components/homeassistant_yellow/update.py index 7a6e2f19b1f0c7..d86ac93a8489cc 100644 --- a/homeassistant/components/homeassistant_yellow/update.py +++ b/homeassistant/components/homeassistant_yellow/update.py @@ -16,6 +16,7 @@ from homeassistant.components.homeassistant_hardware.util import ( ApplicationType, FirmwareInfo, + ResetTarget, ) from homeassistant.components.update import UpdateDeviceClass from homeassistant.config_entries import ConfigEntry @@ -173,7 +174,7 @@ async def async_setup_entry( class FirmwareUpdateEntity(BaseFirmwareUpdateEntity): """Yellow firmware update entity.""" - bootloader_reset_type = "yellow" # Triggers a GPIO reset + bootloader_reset_methods = [ResetTarget.YELLOW] # Triggers a GPIO reset def __init__( self, diff --git a/homeassistant/components/portainer/manifest.json b/homeassistant/components/portainer/manifest.json index bb285dd37b9062..22aea63c129e4c 100644 --- a/homeassistant/components/portainer/manifest.json +++ b/homeassistant/components/portainer/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/portainer", "iot_class": "local_polling", "quality_scale": "bronze", - "requirements": ["pyportainer==0.1.7"] + "requirements": ["pyportainer==1.0.2"] } diff --git a/homeassistant/components/shelly/utils.py b/homeassistant/components/shelly/utils.py index 962a314f8eb342..0fcec2942614ec 100644 --- a/homeassistant/components/shelly/utils.py +++ b/homeassistant/components/shelly/utils.py @@ -552,8 +552,15 @@ def percentage_to_brightness(percentage: int) -> int: def mac_address_from_name(name: str) -> str | None: """Convert a name to a mac address.""" - mac = name.partition(".")[0].partition("-")[-1] - return mac.upper() if len(mac) == 12 else None + base = name.split(".", 1)[0] + if "-" not in base: + return None + + mac = base.rsplit("-", 1)[-1] + if len(mac) != 12 or not all(char in "0123456789abcdefABCDEF" for char in mac): + return None + + return mac.upper() def get_release_url(gen: int, model: str, beta: bool) -> str | None: diff --git a/homeassistant/components/switcher_kis/switch.py b/homeassistant/components/switcher_kis/switch.py index 1e602061c2c02f..1771716b64d33c 100644 --- a/homeassistant/components/switcher_kis/switch.py +++ b/homeassistant/components/switcher_kis/switch.py @@ -63,12 +63,14 @@ async def async_setup_entry( SERVICE_SET_AUTO_OFF_NAME, SERVICE_SET_AUTO_OFF_SCHEMA, "async_set_auto_off_service", + entity_device_classes=(SwitchDeviceClass.SWITCH,), ) platform.async_register_entity_service( SERVICE_TURN_ON_WITH_TIMER_NAME, SERVICE_TURN_ON_WITH_TIMER_SCHEMA, "async_turn_on_with_timer_service", + 
entity_device_classes=(SwitchDeviceClass.SWITCH,), ) @callback @@ -135,22 +137,6 @@ async def async_turn_off(self, **kwargs: Any) -> None: self._attr_is_on = self.control_result = False self.async_write_ha_state() - async def async_set_auto_off_service(self, auto_off: timedelta) -> None: - """Use for handling setting device auto-off service calls.""" - _LOGGER.warning( - "Service '%s' is not supported by %s", - SERVICE_SET_AUTO_OFF_NAME, - self.coordinator.name, - ) - - async def async_turn_on_with_timer_service(self, timer_minutes: int) -> None: - """Use for turning device on with a timer service calls.""" - _LOGGER.warning( - "Service '%s' is not supported by %s", - SERVICE_TURN_ON_WITH_TIMER_NAME, - self.coordinator.name, - ) - class SwitcherPowerPlugSwitchEntity(SwitcherBaseSwitchEntity): """Representation of a Switcher power plug switch entity.""" diff --git a/homeassistant/components/zha/config_flow.py b/homeassistant/components/zha/config_flow.py index 8ca270c0cc2bbf..a6b45cbd086369 100644 --- a/homeassistant/components/zha/config_flow.py +++ b/homeassistant/components/zha/config_flow.py @@ -320,7 +320,9 @@ async def async_step_manual_port_config( } ) - if await self._radio_mgr.radio_type.controller.probe(user_input): + if await self._radio_mgr.radio_type.controller.probe( + self._radio_mgr.device_settings + ): return await self.async_step_verify_radio() errors["base"] = "cannot_connect" diff --git a/homeassistant/components/zha/strings.json b/homeassistant/components/zha/strings.json index 91be9c3b3b483f..71709fdc43dc8d 100644 --- a/homeassistant/components/zha/strings.json +++ b/homeassistant/components/zha/strings.json @@ -4,7 +4,7 @@ "step": { "choose_serial_port": { "title": "Select a serial port", - "description": "Select the serial port for your Zigbee radio", + "description": "Select the serial port for your Zigbee adapter", "data": { "path": "Serial device path" } @@ -16,10 +16,10 @@ "description": "Do you want to set up {name}?" }, "manual_pick_radio_type": { - "title": "Select a radio type", - "description": "Pick your Zigbee radio type", + "title": "Select an adapter type", + "description": "Pick your Zigbee adapter type", "data": { - "radio_type": "Radio type" + "radio_type": "Adapter type" } }, "manual_port_config": { @@ -37,8 +37,8 @@ } }, "verify_radio": { - "title": "Radio is not recommended", - "description": "The radio you are using ({name}) is not recommended and support for it may be removed in the future. Please see the Zigbee Home Automation integration's documentation for [a list of recommended adapters]({docs_recommended_adapters_url})." + "title": "Adapter is not recommended", + "description": "The adapter you are using ({name}) is not recommended and support for it may be removed in the future. Please see the Zigbee Home Automation integration's documentation for [a list of recommended adapters]({docs_recommended_adapters_url})." 
}, "choose_setup_strategy": { "title": "Set up Zigbee", @@ -70,11 +70,11 @@ }, "choose_formation_strategy": { "title": "Network formation", - "description": "Choose the network settings for your radio.", + "description": "Choose the network settings for your adapter.", "menu_options": { "form_new_network": "Erase network settings and create a new network", "form_initial_network": "Create a network", - "reuse_settings": "Keep radio network settings", + "reuse_settings": "Keep adapter network settings", "choose_automatic_backup": "Restore an automatic backup", "upload_manual_backup": "Upload a manual backup" }, @@ -101,10 +101,10 @@ } }, "maybe_confirm_ezsp_restore": { - "title": "Overwrite radio IEEE address", - "description": "Your backup has a different IEEE address than your radio. For your network to function properly, the IEEE address of your radio should also be changed.\n\nThis is a permanent operation.", + "title": "Overwrite adapter IEEE address", + "description": "Your backup has a different IEEE address than your adapter. For your network to function properly, the IEEE address of your adapter should also be changed.\n\nThis is a permanent operation.", "data": { - "overwrite_coordinator_ieee": "Permanently replace the radio IEEE address" + "overwrite_coordinator_ieee": "Permanently replace the adapter IEEE address" } } }, @@ -133,7 +133,7 @@ }, "prompt_migrate_or_reconfigure": { "title": "Migrate or re-configure", - "description": "Are you migrating to a new radio or re-configuring the current radio?", + "description": "Are you migrating to a new adapter or re-configuring the current adapter?", "menu_options": { "intent_migrate": "Migrate to a new adapter", "intent_reconfigure": "Re-configure the current adapter" @@ -584,12 +584,12 @@ }, "issues": { "wrong_silabs_firmware_installed_nabucasa": { - "title": "Zigbee radio with multiprotocol firmware detected", - "description": "Your Zigbee radio was previously used with multiprotocol (Zigbee and Thread) and still has multiprotocol firmware installed: ({firmware_type}). \n Option 1: To run your radio exclusively with ZHA, you need to install the Zigbee firmware:\n - Open the documentation by selecting the link under \"Learn More\".\n - Follow the instructions described in Step 2 (and Step 2 only) to 'Flash the Silicon Labs radio Zigbee firmware'.\n Option 2: To run your radio with multiprotocol, follow these steps: \n - Go to Settings > System > Hardware, select the device and select Configure. \n - Select the Configure IEEE 802.15.4 radio multiprotocol support option. \n - Select the checkbox and select Submit. \n - Once installed, configure the newly discovered ZHA integration." + "title": "Zigbee adapter with multiprotocol firmware detected", + "description": "Your Zigbee adapter was previously used with multiprotocol (Zigbee and Thread) and still has multiprotocol firmware installed: ({firmware_type}).\n\nTo run your adapter exclusively with ZHA, you need to install the Zigbee firmware:\n - Go to Settings > System > Hardware, select the device and select Configure.\n - Select the 'Migrate Zigbee to a new adapter' option and follow the instructions." }, "wrong_silabs_firmware_installed_other": { "title": "[%key:component::zha::issues::wrong_silabs_firmware_installed_nabucasa::title%]", - "description": "Your Zigbee radio was previously used with multiprotocol (Zigbee and Thread) and still has multiprotocol firmware installed: ({firmware_type}). To run your radio exclusively with ZHA, you need to install Zigbee firmware. 
Follow your Zigbee radio manufacturer's instructions for how to do this." + "description": "Your Zigbee adapter was previously used with multiprotocol (Zigbee and Thread) and still has multiprotocol firmware installed: ({firmware_type}).\n\nTo run your adapter exclusively with ZHA, you need to install Zigbee firmware. Follow your Zigbee adapter manufacturer's instructions for how to do this." }, "inconsistent_network_settings": { "title": "Zigbee network settings have changed", @@ -597,7 +597,7 @@ "step": { "init": { "title": "[%key:component::zha::issues::inconsistent_network_settings::title%]", - "description": "Your Zigbee radio's network settings are inconsistent with the most recent network backup. This usually happens if another Zigbee integration (e.g. Zigbee2MQTT or deCONZ) has overwritten them.\n\n{diff}\n\nIf you did not intentionally change your network settings, restore from the most recent backup: your devices will not work otherwise.", + "description": "Your Zigbee adapter's network settings are inconsistent with the most recent network backup. This usually happens if another Zigbee integration (e.g. Zigbee2MQTT or deCONZ) has overwritten them.\n\n{diff}\n\nIf you did not intentionally change your network settings, restore from the most recent backup: your devices will not work otherwise.", "menu_options": { "use_new_settings": "Keep the new settings", "restore_old_settings": "Restore backup (recommended)" diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 0d5bb38b09a433..da5649185ddc78 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -40,7 +40,7 @@ hass-nabucasa==1.2.0 hassil==3.2.0 home-assistant-bluetooth==1.13.1 home-assistant-frontend==20251001.0 -home-assistant-intents==2025.9.24 +home-assistant-intents==2025.10.1 httpx==0.28.1 ifaddr==0.2.0 Jinja2==3.1.6 diff --git a/requirements_all.txt b/requirements_all.txt index 3d7bf35af033e8..eef7754c626399 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1189,7 +1189,7 @@ holidays==0.81 home-assistant-frontend==20251001.0 # homeassistant.components.conversation -home-assistant-intents==2025.9.24 +home-assistant-intents==2025.10.1 # homeassistant.components.homematicip_cloud homematicip==2.3.0 @@ -2021,7 +2021,7 @@ pyfibaro==0.8.3 pyfido==2.1.2 # homeassistant.components.firefly_iii -pyfirefly==0.1.5 +pyfirefly==0.1.6 # homeassistant.components.fireservicerota pyfireservicerota==0.0.46 @@ -2293,7 +2293,7 @@ pyplaato==0.0.19 pypoint==3.0.0 # homeassistant.components.portainer -pyportainer==0.1.7 +pyportainer==1.0.2 # homeassistant.components.probe_plus pyprobeplus==1.0.1 @@ -3060,7 +3060,7 @@ unifi_ap==0.0.2 unifiled==0.11 # homeassistant.components.homeassistant_hardware -universal-silabs-flasher==0.0.32 +universal-silabs-flasher==0.0.34 # homeassistant.components.upb upb-lib==0.6.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5814c86efb9512..99e8b57e6a07d6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1038,7 +1038,7 @@ holidays==0.81 home-assistant-frontend==20251001.0 # homeassistant.components.conversation -home-assistant-intents==2025.9.24 +home-assistant-intents==2025.10.1 # homeassistant.components.homematicip_cloud homematicip==2.3.0 @@ -1690,7 +1690,7 @@ pyfibaro==0.8.3 pyfido==2.1.2 # homeassistant.components.firefly_iii -pyfirefly==0.1.5 +pyfirefly==0.1.6 # homeassistant.components.fireservicerota pyfireservicerota==0.0.46 @@ -1917,7 +1917,7 @@ 
pyplaato==0.0.19 pypoint==3.0.0 # homeassistant.components.portainer -pyportainer==0.1.7 +pyportainer==1.0.2 # homeassistant.components.probe_plus pyprobeplus==1.0.1 @@ -2531,7 +2531,7 @@ ultraheat-api==0.5.7 unifi-discovery==1.2.0 # homeassistant.components.homeassistant_hardware -universal-silabs-flasher==0.0.32 +universal-silabs-flasher==0.0.34 # homeassistant.components.upb upb-lib==0.6.1 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index a9f0aacdae106c..c127f5ae51ebb3 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -32,7 +32,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.8.9,source=/uv,target=/bin/uv \ go2rtc-client==0.2.1 \ ha-ffmpeg==3.2.2 \ hassil==3.2.0 \ - home-assistant-intents==2025.9.24 \ + home-assistant-intents==2025.10.1 \ mutagen==1.47.0 \ pymicro-vad==1.0.1 \ pyspeex-noise==1.0.2 diff --git a/tests/components/analytics/test_analytics.py b/tests/components/analytics/test_analytics.py index be8f38901ee442..feffc952a49d50 100644 --- a/tests/components/analytics/test_analytics.py +++ b/tests/components/analytics/test_analytics.py @@ -1085,17 +1085,6 @@ async def test_devices_payload_no_entities( "sw_version": "test-sw-version", "via_device": None, }, - { - "entities": [], - "entry_type": "service", - "has_configuration_url": False, - "hw_version": None, - "manufacturer": "test-manufacturer", - "model": None, - "model_id": "test-model-id", - "sw_version": None, - "via_device": None, - }, { "entities": [], "entry_type": None, @@ -1160,6 +1149,13 @@ async def test_devices_payload_with_entities( manufacturer="test-manufacturer", model_id="test-model-id", ) + device_entry_3 = device_registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + identifiers={("device", "3")}, + manufacturer="test-manufacturer", + model_id="test-model-id", + entry_type=dr.DeviceEntryType.SERVICE, + ) # First device @@ -1209,6 +1205,14 @@ async def test_devices_payload_with_entities( device_id=device_entry_2.id, ) + # Third device (service type) + entity_registry.async_get_or_create( + domain="light", + platform="hue", + unique_id="4", + device_id=device_entry_3.id, + ) + # Entity without device with unit of measurement and state class entity_registry.async_get_or_create( domain="sensor", diff --git a/tests/components/bayesian/test_binary_sensor.py b/tests/components/bayesian/test_binary_sensor.py index b0d81af228cb8a..b7b3d24c6e4946 100644 --- a/tests/components/bayesian/test_binary_sensor.py +++ b/tests/components/bayesian/test_binary_sensor.py @@ -8,11 +8,15 @@ from homeassistant import config as hass_config from homeassistant.components.bayesian import binary_sensor as bayesian -from homeassistant.components.bayesian.const import DOMAIN +from homeassistant.components.bayesian.const import ( + DEFAULT_PROBABILITY_THRESHOLD, + DOMAIN, +) from homeassistant.components.homeassistant import ( DOMAIN as HA_DOMAIN, SERVICE_UPDATE_ENTITY, ) +from homeassistant.config_entries import ConfigSubentryData from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_RELOAD, @@ -26,7 +30,7 @@ from homeassistant.helpers.event import async_track_state_change_event from homeassistant.setup import async_setup_component -from tests.common import get_fixture_path +from tests.common import MockConfigEntry, get_fixture_path async def test_load_values_when_added_to_hass(hass: HomeAssistant) -> None: @@ -131,7 +135,64 @@ async def test_sensor_numeric_state( assert await async_setup_component(hass, "binary_sensor", config) await 
hass.async_block_till_done() + await _test_sensor_numeric_state(hass, issue_registry) + + +async def test_sensor_numeric_state_config_entry( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test sensor on template platform observations.""" + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "Test_Binary", + "prior": 0.2, + "probability_threshold": 0.5, + }, + subentries_data=[ + ConfigSubentryData( + data={ + "platform": "numeric_state", + "entity_id": "sensor.test_monitored", + "below": 10, + "above": 5, + "prob_given_true": 0.7, + "prob_given_false": 0.4, + "name": "observation_1", + }, + subentry_type="observation", + title="observation_1", + unique_id=None, + ), + ConfigSubentryData( + data={ + "platform": "numeric_state", + "entity_id": "sensor.test_monitored1", + "below": 7, + "above": 5, + "prob_given_true": 0.9, + "prob_given_false": 0.2, + "name": "observation_2", + }, + subentry_type="observation", + title="observation_2", + unique_id=None, + ), + ], + title="Test_Binary", + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + await _test_sensor_numeric_state(hass, issue_registry) + + +async def _test_sensor_numeric_state( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: hass.states.async_set("sensor.test_monitored", 6) await hass.async_block_till_done() @@ -224,6 +285,47 @@ async def test_sensor_state(hass: HomeAssistant) -> None: assert await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() + await _test_sensor_state(hass, prior) + + +async def test_sensor_state_config_entry(hass: HomeAssistant) -> None: + """Test sensor on template platform observations.""" + prior = 0.2 + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "Test_Binary", + "prior": prior, + "probability_threshold": 0.32, + }, + subentries_data=[ + ConfigSubentryData( + data={ + "platform": "state", + "entity_id": "sensor.test_monitored", + "to_state": "off", + "prob_given_true": 0.8, + "prob_given_false": 0.4, + "name": "observation_1", + }, + subentry_type="observation", + title="observation_1", + unique_id=None, + ) + ], + title="Test_Binary", + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + await _test_sensor_state(hass, prior) + + +async def _test_sensor_state(hass: HomeAssistant, prior: float) -> None: + """Common test code for state-based observations.""" hass.states.async_set("sensor.test_monitored", "on") await hass.async_block_till_done() state = hass.states.get("binary_sensor.test_binary") @@ -295,6 +397,44 @@ async def test_sensor_value_template(hass: HomeAssistant) -> None: assert await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() + await _test_sensor_value_template(hass) + + +async def test_sensor_value_template_config_entry(hass: HomeAssistant) -> None: + """Test sensor on template platform observations.""" + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "Test_Binary", + "prior": 0.2, + "probability_threshold": 0.32, + }, + subentries_data=[ + ConfigSubentryData( + data={ + "platform": "template", + "value_template": "{{states('sensor.test_monitored') == 'off'}}", + "prob_given_true": 0.8, + "prob_given_false": 0.4, + "name": "observation_1", + }, + subentry_type="observation", 
+ title="observation_1", + unique_id=None, + ) + ], + title="Test_Binary", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + await _test_sensor_value_template(hass) + + +async def _test_sensor_value_template(hass: HomeAssistant) -> None: hass.states.async_set("sensor.test_monitored", "on") state = hass.states.get("binary_sensor.test_binary") @@ -361,7 +501,71 @@ async def test_mixed_states(hass: HomeAssistant) -> None: } assert await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() + await _test_mixed_states(hass) + + +async def test_mixed_states_config_entry(hass: HomeAssistant) -> None: + """Test sensor on template platform observations.""" + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "should_HVAC", + "prior": 0.3, + "probability_threshold": 0.5, + }, + subentries_data=[ + ConfigSubentryData( + data={ + "platform": "template", + "value_template": "{{states('sensor.guest_sensor') != 'off'}}", + "prob_given_true": 0.3, + "prob_given_false": 0.15, + "name": "observation_1", + }, + subentry_type="observation", + title="observation_1", + unique_id=None, + ), + ConfigSubentryData( + data={ + "platform": "state", + "entity_id": "sensor.anyone_home", + "to_state": "on", + "prob_given_true": 0.6, + "prob_given_false": 0.05, + "name": "observation_2", + }, + subentry_type="observation", + title="observation_2", + unique_id=None, + ), + ConfigSubentryData( + data={ + "platform": "numeric_state", + "entity_id": "sensor.temperature", + "below": 24, + "above": 19, + "prob_given_true": 0.1, + "prob_given_false": 0.6, + "name": "observation_3", + }, + subentry_type="observation", + title="observation_3", + unique_id=None, + ), + ], + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + await _test_mixed_states(hass) + +async def _test_mixed_states(hass: HomeAssistant) -> None: + """Common test code for mixed states.""" hass.states.async_set("sensor.guest_sensor", "UNKNOWN") hass.states.async_set("sensor.anyone_home", "on") hass.states.async_set("sensor.temperature", 15) @@ -417,7 +621,49 @@ async def test_threshold(hass: HomeAssistant, issue_registry: ir.IssueRegistry) assert await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() + await _test_threshold(hass, issue_registry) + +async def test_threshold_config_entry( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test sensor on template platform observations.""" + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "Test_Binary", + "prior": 0.5, + "probability_threshold": 1, + }, + subentries_data=[ + ConfigSubentryData( + data={ + "platform": "state", + "entity_id": "sensor.test_monitored", + "to_state": "on", + "prob_given_true": 1.0, + "prob_given_false": 0.0, + "name": "observation_1", + }, + subentry_type="observation", + title="observation_1", + unique_id=None, + ), + ], + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + await _test_threshold(hass, issue_registry) + + +async def _test_threshold( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Common test code for threshold testing.""" hass.states.async_set("sensor.test_monitored", "on") await 
hass.async_block_till_done() @@ -435,7 +681,7 @@ async def test_multiple_observations(hass: HomeAssistant) -> None: Before the merge of #67631 this practice was a common work-around for bayesian's ignoring of negative observations, this also preserves that function """ - + prior = 0.2 config = { "binary_sensor": { "name": "Test_Binary", @@ -456,14 +702,66 @@ async def test_multiple_observations(hass: HomeAssistant) -> None: "prob_given_false": 0.6, }, ], - "prior": 0.2, + "prior": prior, "probability_threshold": 0.32, } } assert await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() + await _test_multiple_observations(hass, prior) + + +async def test_multiple_observations_config_entry(hass: HomeAssistant) -> None: + """Test sensor on multiple observations.""" + prior = 0.2 + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "Test_Binary", + "prior": prior, + "probability_threshold": 0.32, + }, + subentries_data=[ + ConfigSubentryData( + data={ + "platform": "state", + "entity_id": "sensor.test_monitored", + "to_state": "blue", + "prob_given_true": 0.8, + "prob_given_false": 0.4, + "name": "observation_1", + }, + subentry_type="observation", + title="observation_1", + unique_id=None, + ), + ConfigSubentryData( + data={ + "platform": "state", + "entity_id": "sensor.test_monitored", + "to_state": "red", + "prob_given_true": 0.2, + "prob_given_false": 0.6, + "name": "observation_2", + }, + subentry_type="observation", + title="observation_2", + unique_id=None, + ), + ], + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + await _test_multiple_observations(hass, prior) + + +async def _test_multiple_observations(hass: HomeAssistant, prior: float) -> None: + """Common test code for multiple observations.""" hass.states.async_set("sensor.test_monitored", "off") await hass.async_block_till_done() @@ -472,7 +770,7 @@ async def test_multiple_observations(hass: HomeAssistant) -> None: for attrs in state.attributes.values(): json.dumps(attrs) assert state.attributes.get("occurred_observation_entities") == [] - assert state.attributes.get("probability") == 0.2 + assert state.attributes.get("probability") == prior # probability should be the same as the prior as negative observations are ignored in multi-state assert state.state == "off" @@ -565,7 +863,104 @@ async def test_multiple_numeric_observations( } assert await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() + await _test_multiple_numeric_observations(hass, issue_registry) + +async def test_multiple_numeric_observations_config_entry( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test sensor on multiple numeric state observations.""" + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "nice_day", + "prior": 0.3, + "probability_threshold": DEFAULT_PROBABILITY_THRESHOLD, + }, + subentries_data=[ + ConfigSubentryData( + data={ + "platform": "numeric_state", + "entity_id": "sensor.test_temp", + "below": 0, + "prob_given_true": 0.05, + "prob_given_false": 0.2, + "name": "observation_1", + }, + subentry_type="observation", + title="observation_1", + unique_id=None, + ), + ConfigSubentryData( + data={ + "platform": "numeric_state", + "entity_id": "sensor.test_temp", + "below": 10, + "above": 0, + "prob_given_true": 0.1, + "prob_given_false": 0.25, + "name": "observation_2", + }, + 
subentry_type="observation", + title="observation_2", + unique_id=None, + ), + ConfigSubentryData( + data={ + "platform": "numeric_state", + "entity_id": "sensor.test_temp", + "below": 15, + "above": 10, + "prob_given_true": 0.2, + "prob_given_false": 0.35, + "name": "observation_3", + }, + subentry_type="observation", + title="observation_3", + unique_id=None, + ), + ConfigSubentryData( + data={ + "platform": "numeric_state", + "entity_id": "sensor.test_temp", + "below": 25, + "above": 15, + "prob_given_true": 0.5, + "prob_given_false": 0.15, + "name": "observation_4", + }, + subentry_type="observation", + title="observation_4", + unique_id=None, + ), + ConfigSubentryData( + data={ + "platform": "numeric_state", + "entity_id": "sensor.test_temp", + "above": 25, + "prob_given_true": 0.15, + "prob_given_false": 0.05, + "name": "observation_5", + }, + subentry_type="observation", + title="observation_5", + unique_id=None, + ), + ], + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + await _test_multiple_numeric_observations(hass, issue_registry) + + +async def _test_multiple_numeric_observations( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Common test code for multiple numeric state observations.""" hass.states.async_set("sensor.test_temp", -5) await hass.async_block_till_done() @@ -777,6 +1172,152 @@ async def test_mirrored_observations( assert len(issue_registry.issues) == 0 assert await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() + + await _test_mirrored_observations(hass, issue_registry) + + +async def test_mirrored_observations_config_entry( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Test sensor on legacy mirrored observations.""" + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "Test_Binary", + "prior": 0.1, + "probability_threshold": DEFAULT_PROBABILITY_THRESHOLD, + }, + subentries_data=[ + ConfigSubentryData( + data={ + "platform": "state", + "entity_id": "binary_sensor.test_monitored", + "to_state": "on", + "prob_given_true": 0.8, + "prob_given_false": 0.4, + "name": "observation_1", + }, + subentry_type="observation", + title="observation_1", + unique_id=None, + ), + ConfigSubentryData( + data={ + "platform": "state", + "entity_id": "binary_sensor.test_monitored", + "to_state": "off", + "prob_given_true": 0.2, + "prob_given_false": 0.59, + "name": "observation_2", + }, + subentry_type="observation", + title="observation_2", + unique_id=None, + ), + ConfigSubentryData( + data={ + "platform": "numeric_state", + "entity_id": "sensor.test_monitored1", + "above": 5, + "prob_given_true": 0.7, + "prob_given_false": 0.4, + "name": "observation_3", + }, + subentry_type="observation", + title="observation_3", + unique_id=None, + ), + ConfigSubentryData( + data={ + "platform": "numeric_state", + "entity_id": "sensor.test_monitored1", + "below": 5, + "prob_given_true": 0.3, + "prob_given_false": 0.6, + "name": "observation_4", + }, + subentry_type="observation", + title="observation_4", + unique_id=None, + ), + ConfigSubentryData( + data={ + "platform": "template", + "value_template": "{{states('sensor.test_monitored2') == 'off'}}", + "prob_given_true": 0.79, + "prob_given_false": 0.4, + "name": "observation_5", + }, + subentry_type="observation", + title="observation_5", + unique_id=None, + ), + ConfigSubentryData( + data={ + "platform": "template", + 
"value_template": "{{states('sensor.test_monitored2') == 'on'}}", + "prob_given_true": 0.2, + "prob_given_false": 0.6, + "name": "observation_6", + }, + subentry_type="observation", + title="observation_6", + unique_id=None, + ), + ConfigSubentryData( + data={ + "platform": "state", + "entity_id": "sensor.colour", + "to_state": "blue", + "prob_given_true": 0.33, + "prob_given_false": 0.8, + "name": "observation_7", + }, + subentry_type="observation", + title="observation_7", + unique_id=None, + ), + ConfigSubentryData( + data={ + "platform": "state", + "entity_id": "sensor.colour", + "to_state": "green", + "prob_given_true": 0.3, + "prob_given_false": 0.15, + "name": "observation_8", + }, + subentry_type="observation", + title="observation_8", + unique_id=None, + ), + ConfigSubentryData( + data={ + "platform": "state", + "entity_id": "sensor.colour", + "to_state": "red", + "prob_given_true": 0.4, + "prob_given_false": 0.05, + "name": "observation_9", + }, + subentry_type="observation", + title="observation_9", + unique_id=None, + ), + ], + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + await _test_mirrored_observations(hass, issue_registry) + + +async def _test_mirrored_observations( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: + """Common test code for mirrored observations.""" hass.states.async_set("sensor.test_monitored2", "on") await hass.async_block_till_done() @@ -792,7 +1333,7 @@ async def test_mirrored_observations( async def test_missing_prob_given_false( hass: HomeAssistant, issue_registry: ir.IssueRegistry ) -> None: - """Test whether missing prob_given_false are detected and appropriate issues are created.""" + """Test whether missing prob_given_false in YAML are detected and appropriate issues are created.""" config = { "binary_sensor": { @@ -840,7 +1381,7 @@ async def test_bad_multi_numeric( issue_registry: ir.IssueRegistry, caplog: pytest.LogCaptureFixture, ) -> None: - """Test whether missing prob_given_false are detected and appropriate issues are created.""" + """Test whether overlaps are detected in YAML configs, in Config Entries this is detected during the config flow and is tested elsewhere.""" config = { "binary_sensor": { @@ -902,7 +1443,7 @@ async def test_inverted_numeric( issue_registry: ir.IssueRegistry, caplog: pytest.LogCaptureFixture, ) -> None: - """Test whether missing prob_given_false are detected and appropriate logs are created.""" + """Test whether inverted numeric states are detected in YAML configs, for config entries this is detected during config flow validation and so is tested elsewhere.""" config = { "binary_sensor": { @@ -934,7 +1475,7 @@ async def test_no_value_numeric( issue_registry: ir.IssueRegistry, caplog: pytest.LogCaptureFixture, ) -> None: - """Test whether missing prob_given_false are detected and appropriate logs are created.""" + """Tests whether numeric states with no above or below are detected in YAML configs, for config entries this is detected during config flow validation and so is tested elsewhere.""" config = { "binary_sensor": { @@ -978,7 +1519,7 @@ async def test_probability_updates(hass: HomeAssistant) -> None: async def test_observed_entities(hass: HomeAssistant) -> None: - """Test sensor on observed entities.""" + """Test the observation attributes.""" config = { "binary_sensor": { "name": "Test_Binary", @@ -1009,6 +1550,63 @@ async def test_observed_entities(hass: HomeAssistant) -> None: assert 
await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() + await _test_observed_entities( + hass, + ) + + +async def test_observed_entities_config_entry(hass: HomeAssistant) -> None: + """Test the observation attributes using config entry.""" + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "Test_Binary", + "prior": 0.2, + "probability_threshold": 0.32, + }, + subentries_data=[ + ConfigSubentryData( + data={ + "platform": "state", + "entity_id": "sensor.test_monitored", + "to_state": "off", + "prob_given_true": 0.9, + "prob_given_false": 0.4, + "name": "observation_1", + }, + subentry_type="observation", + title="observation_1", + unique_id=None, + ), + ConfigSubentryData( + data={ + "platform": "template", + "value_template": ( + "{{is_state('sensor.test_monitored1','on') and" + " is_state('sensor.test_monitored','off')}}" + ), + "prob_given_true": 0.9, + "prob_given_false": 0.1, + "name": "observation_2", + }, + subentry_type="observation", + title="observation_2", + unique_id=None, + ), + ], + title="Test_Binary", + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + await _test_observed_entities(hass) + + +async def _test_observed_entities(hass: HomeAssistant) -> None: + """Common test code for occurred_observation_entities. This test reveals some interesting historic behaviour - the last entity to update a template is the one that is recorded as having made the observation.""" hass.states.async_set("sensor.test_monitored", "on") await hass.async_block_till_done() hass.states.async_set("sensor.test_monitored1", "off") @@ -1124,6 +1722,48 @@ async def test_template_error( await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() + await _test_template_error(hass, caplog) + + +async def test_template_error_config_entry( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test template sensor with template error using config entry.""" + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "Test_Binary", + "prior": 0.2, + "probability_threshold": 0.32, + }, + subentries_data=[ + ConfigSubentryData( + data={ + "platform": "template", + "value_template": "{{ xyz + 1 }}", + "prob_given_true": 0.9, + "prob_given_false": 0.1, + "name": "observation_1", + }, + subentry_type="observation", + title="observation_1", + unique_id=None, + ) + ], + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + await _test_template_error(hass, caplog) + + +async def _test_template_error( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Common test code for template error.""" assert hass.states.get("binary_sensor.test_binary").state == "off" assert "TemplateError" in caplog.text @@ -1150,6 +1790,45 @@ async def test_update_request_with_template(hass: HomeAssistant) -> None: } await async_setup_component(hass, "binary_sensor", config) + + await _test_update_request_with_template(hass) + + +async def test_update_request_with_template_config_entry(hass: HomeAssistant) -> None: + """Test template sensor with template error using config entry.""" + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "Test_Binary", + "prior": 0.2, + "probability_threshold": 0.32, + }, + subentries_data=[ + ConfigSubentryData( + data={ + 
"platform": "template", + "value_template": "{{states('sensor.test_monitored') == 'off'}}", + "prob_given_true": 0.8, + "prob_given_false": 0.4, + "name": "observation_1", + }, + subentry_type="observation", + title="observation_1", + unique_id=None, + ) + ], + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + await _test_update_request_with_template(hass) + + +async def _test_update_request_with_template(hass: HomeAssistant) -> None: + """Common test code for template update.""" await async_setup_component(hass, HA_DOMAIN, {}) await hass.async_block_till_done() @@ -1167,7 +1846,7 @@ async def test_update_request_with_template(hass: HomeAssistant) -> None: async def test_update_request_without_template(hass: HomeAssistant) -> None: - """Test sensor on template platform observations that gets an update request.""" + """Test sensor on state platform observations that gets an update request.""" config = { "binary_sensor": { "name": "Test_Binary", @@ -1187,6 +1866,48 @@ async def test_update_request_without_template(hass: HomeAssistant) -> None: } await async_setup_component(hass, "binary_sensor", config) + + await _test_update_request_without_template(hass) + + +async def test_update_request_without_template_config_entry( + hass: HomeAssistant, +) -> None: + """Test template sensor with template error using config entry.""" + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "Test_Binary", + "prior": 0.2, + "probability_threshold": 0.32, + }, + subentries_data=[ + ConfigSubentryData( + data={ + "platform": "state", + "entity_id": "sensor.test_monitored", + "to_state": "off", + "prob_given_true": 0.9, + "prob_given_false": 0.4, + "name": "observation_1", + }, + subentry_type="observation", + title="observation_1", + unique_id=None, + ) + ], + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + await _test_update_request_without_template(hass) + + +async def _test_update_request_without_template(hass: HomeAssistant) -> None: + """Common test code for state update.""" await async_setup_component(hass, HA_DOMAIN, {}) await hass.async_block_till_done() @@ -1207,7 +1928,8 @@ async def test_update_request_without_template(hass: HomeAssistant) -> None: async def test_monitored_sensor_goes_away(hass: HomeAssistant) -> None: - """Test sensor on template platform observations that goes away.""" + """Test sensor on state platform observations that goes away.""" + prior = 0.2 config = { "binary_sensor": { "name": "Test_Binary", @@ -1221,12 +1943,56 @@ async def test_monitored_sensor_goes_away(hass: HomeAssistant) -> None: "prob_given_false": 0.4, }, ], - "prior": 0.2, + "prior": prior, "probability_threshold": 0.32, } } await async_setup_component(hass, "binary_sensor", config) + + await _test_monitored_sensor_goes_away(hass, prior) + + +async def test_monitored_sensor_goes_away_config_entry( + hass: HomeAssistant, +) -> None: + """Test template sensor with template error using config entry.""" + prior = 0.2 + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "Test_Binary", + "prior": prior, + "probability_threshold": 0.32, + }, + subentries_data=[ + ConfigSubentryData( + data={ + "platform": "state", + "entity_id": "sensor.test_monitored", + "to_state": "on", + "prob_given_true": 0.9, + "prob_given_false": 0.4, + "name": "observation_1", + }, + 
subentry_type="observation", + title="observation_1", + unique_id=None, + ) + ], + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + await _test_monitored_sensor_goes_away(hass, prior) + + +async def _test_monitored_sensor_goes_away(hass: HomeAssistant, prior: float) -> None: + """Common test code for state update.""" + await async_setup_component(hass, HA_DOMAIN, {}) await hass.async_block_till_done() @@ -1238,17 +2004,24 @@ async def test_monitored_sensor_goes_away(hass: HomeAssistant) -> None: # Calculated using bayes theorum where P(A) = 0.2, P(B|A) = 0.9, P(B|notA) = 0.4 -> 0.36 (>0.32) hass.states.async_remove("sensor.test_monitored") - await hass.async_block_till_done() + + assert ( + hass.states.get("binary_sensor.test_binary").attributes.get("probability") + == prior + ) + assert hass.states.get("binary_sensor.test_binary").state == "off" + + hass.states.async_set("sensor.test_monitored", STATE_UNAVAILABLE) assert ( hass.states.get("binary_sensor.test_binary").attributes.get("probability") - == 0.2 + == prior ) assert hass.states.get("binary_sensor.test_binary").state == "off" async def test_reload(hass: HomeAssistant) -> None: - """Verify we can reload bayesian sensors.""" + """Verify we can reload YAML bayesian sensors.""" config = { "binary_sensor": { @@ -1315,6 +2088,47 @@ async def test_template_triggers(hass: HomeAssistant) -> None: await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() + await _test_template_triggers(hass) + + +async def test_template_triggers_config_entry( + hass: HomeAssistant, +) -> None: + """Test template sensor with template error using config entry.""" + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "Test_Binary", + "prior": 0.2, + "probability_threshold": 0.32, + }, + subentries_data=[ + ConfigSubentryData( + data={ + "platform": "template", + "value_template": "{{ states.input_boolean.test.state }}", + "prob_given_true": 1.0, + "prob_given_false": 0.0, + "name": "observation_1", + }, + subentry_type="observation", + title="observation_1", + unique_id=None, + ) + ], + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + await _test_template_triggers(hass) + + +async def _test_template_triggers(hass: HomeAssistant) -> None: + """Common test code for template triggers.""" + assert hass.states.get("binary_sensor.test_binary").state == STATE_OFF events = [] @@ -1357,6 +2171,46 @@ async def test_state_triggers(hass: HomeAssistant) -> None: await async_setup_component(hass, "binary_sensor", config) await hass.async_block_till_done() + await _test_state_triggers(hass) + + +async def test_state_triggers_config_entry( + hass: HomeAssistant, +) -> None: + """Test template sensor with template error using config entry.""" + config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "Test_Binary", + "prior": 0.2, + "probability_threshold": 0.32, + }, + subentries_data=[ + ConfigSubentryData( + data={ + "platform": "state", + "entity_id": "sensor.test_monitored", + "to_state": "off", + "prob_given_true": 0.9999, + "prob_given_false": 0.9994, + "name": "observation_1", + }, + subentry_type="observation", + title="observation_1", + unique_id=None, + ) + ], + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await 
hass.async_block_till_done() + + await _test_state_triggers(hass) + + +async def _test_state_triggers(hass: HomeAssistant) -> None: assert hass.states.get("binary_sensor.test_binary").state == STATE_OFF events = [] diff --git a/tests/components/device_automation/test_init.py b/tests/components/device_automation/test_init.py index 456202a63a43df..c04dd242e61fef 100644 --- a/tests/components/device_automation/test_init.py +++ b/tests/components/device_automation/test_init.py @@ -1,6 +1,6 @@ """The test for light device automation.""" -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import AsyncMock, MagicMock, Mock, patch import attr import pytest @@ -1088,7 +1088,7 @@ async def test_automation_with_dynamically_validated_condition( module_cache = hass.data[loader.DATA_COMPONENTS] module = module_cache["fake_integration.device_condition"] - module.async_validate_condition_config = AsyncMock() + module.async_validate_condition_config = AsyncMock(return_value=MagicMock()) config_entry = MockConfigEntry(domain="fake_integration", data={}) config_entry.mock_state(hass, ConfigEntryState.LOADED) diff --git a/tests/components/esphome/test_analytics.py b/tests/components/esphome/test_analytics.py new file mode 100644 index 00000000000000..f4de75b2ee0c63 --- /dev/null +++ b/tests/components/esphome/test_analytics.py @@ -0,0 +1,31 @@ +"""Tests for analytics platform.""" + +import pytest + +from homeassistant.components.analytics import async_devices_payload +from homeassistant.components.esphome import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + + +@pytest.mark.asyncio +async def test_analytics( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: + """Test the analytics platform.""" + await async_setup_component(hass, "analytics", {}) + + config_entry = MockConfigEntry(domain=DOMAIN, data={}) + config_entry.add_to_hass(hass) + device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + identifiers={(DOMAIN, "test")}, + manufacturer="Test Manufacturer", + ) + + result = await async_devices_payload(hass) + assert DOMAIN not in result["integrations"] diff --git a/tests/components/history/test_init.py b/tests/components/history/test_init.py index f1890073567d24..4f2c072703a288 100644 --- a/tests/components/history/test_init.py +++ b/tests/components/history/test_init.py @@ -9,7 +9,6 @@ import pytest from homeassistant.components import history -from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.history import get_significant_states from homeassistant.components.recorder.models import process_timestamp from homeassistant.const import EVENT_HOMEASSISTANT_FINAL_WRITE @@ -377,8 +376,9 @@ async def set_state(entity_id, state, **kwargs): return zero, four, states +@pytest.mark.usefixtures("recorder_mock") async def test_fetch_period_api( - hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the fetch period view for history.""" await async_setup_component(hass, "history", {}) @@ -389,9 +389,9 @@ async def test_fetch_period_api( assert response.status == HTTPStatus.OK +@pytest.mark.usefixtures("recorder_mock") async def test_fetch_period_api_with_use_include_order( hass: 
HomeAssistant, - recorder_mock: Recorder, hass_client: ClientSessionGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -408,8 +408,9 @@ async def test_fetch_period_api_with_use_include_order( assert "The 'use_include_order' option is deprecated" in caplog.text +@pytest.mark.usefixtures("recorder_mock") async def test_fetch_period_api_with_minimal_response( - hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the fetch period view for history with minimal_response.""" now = dt_util.utcnow() @@ -450,8 +451,9 @@ async def test_fetch_period_api_with_minimal_response( ).replace('"', "") +@pytest.mark.usefixtures("recorder_mock") async def test_fetch_period_api_with_no_timestamp( - hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the fetch period view for history with no timestamp.""" await async_setup_component(hass, "history", {}) @@ -460,9 +462,9 @@ async def test_fetch_period_api_with_no_timestamp( assert response.status == HTTPStatus.OK +@pytest.mark.usefixtures("recorder_mock") async def test_fetch_period_api_with_include_order( hass: HomeAssistant, - recorder_mock: Recorder, hass_client: ClientSessionGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -488,8 +490,9 @@ async def test_fetch_period_api_with_include_order( assert "The 'include' option is deprecated" in caplog.text +@pytest.mark.usefixtures("recorder_mock") async def test_entity_ids_limit_via_api( - hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test limiting history to entity_ids.""" await async_setup_component( @@ -514,8 +517,9 @@ async def test_entity_ids_limit_via_api( assert response_json[1][0]["entity_id"] == "light.cow" +@pytest.mark.usefixtures("recorder_mock") async def test_entity_ids_limit_via_api_with_skip_initial_state( - hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test limiting history to entity_ids with skip_initial_state.""" await async_setup_component( @@ -548,8 +552,9 @@ async def test_entity_ids_limit_via_api_with_skip_initial_state( assert response_json[1][0]["entity_id"] == "light.cow" +@pytest.mark.usefixtures("recorder_mock") async def test_fetch_period_api_before_history_started( - hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the fetch period view for history for the far past.""" await async_setup_component( @@ -569,8 +574,9 @@ async def test_fetch_period_api_before_history_started( assert response_json == [] +@pytest.mark.usefixtures("recorder_mock") async def test_fetch_period_api_far_future( - hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the fetch period view for history for the far future.""" await async_setup_component( @@ -590,8 +596,9 @@ async def test_fetch_period_api_far_future( assert response_json == [] +@pytest.mark.usefixtures("recorder_mock") async def test_fetch_period_api_with_invalid_datetime( - hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: 
ClientSessionGenerator ) -> None: """Test the fetch period view for history with an invalid date time.""" await async_setup_component( @@ -609,8 +616,9 @@ async def test_fetch_period_api_with_invalid_datetime( assert response_json == {"message": "Invalid datetime"} +@pytest.mark.usefixtures("recorder_mock") async def test_fetch_period_api_invalid_end_time( - hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the fetch period view for history with an invalid end time.""" await async_setup_component( @@ -631,8 +639,9 @@ async def test_fetch_period_api_invalid_end_time( assert response_json == {"message": "Invalid end_time"} +@pytest.mark.usefixtures("recorder_mock") async def test_entity_ids_limit_via_api_with_end_time( - hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test limiting history to entity_ids with end_time.""" await async_setup_component( @@ -677,8 +686,9 @@ async def test_entity_ids_limit_via_api_with_end_time( assert response_json[1][0]["entity_id"] == "light.cow" +@pytest.mark.usefixtures("recorder_mock") async def test_fetch_period_api_with_no_entity_ids( - hass: HomeAssistant, recorder_mock: Recorder, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the fetch period view for history with minimal_response.""" await async_setup_component(hass, "history", {}) @@ -730,9 +740,9 @@ async def test_fetch_period_api_with_no_entity_ids( ("cow", HTTPStatus.BAD_REQUEST, "message", "Invalid filter_entity_id"), ], ) +@pytest.mark.usefixtures("recorder_mock") async def test_history_with_invalid_entity_ids( hass: HomeAssistant, - recorder_mock: Recorder, hass_client: ClientSessionGenerator, filter_entity_id, status_code, diff --git a/tests/components/history/test_websocket_api.py b/tests/components/history/test_websocket_api.py index 01b49ad55752f2..a4d47f19c4d91a 100644 --- a/tests/components/history/test_websocket_api.py +++ b/tests/components/history/test_websocket_api.py @@ -9,7 +9,6 @@ from homeassistant.components import history from homeassistant.components.history import websocket_api -from homeassistant.components.recorder import Recorder from homeassistant.const import EVENT_HOMEASSISTANT_FINAL_WRITE, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.event import async_track_state_change_event @@ -39,8 +38,9 @@ def test_setup() -> None: # Verification occurs in the fixture +@pytest.mark.usefixtures("recorder_mock") async def test_history_during_period( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test history_during_period.""" now = dt_util.utcnow() @@ -173,8 +173,9 @@ async def test_history_during_period( assert sensor_test_history[2]["a"] == {"any": "attr"} +@pytest.mark.usefixtures("recorder_mock") async def test_history_during_period_impossible_conditions( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test history_during_period returns when condition cannot be true.""" await async_setup_component(hass, "history", {}) @@ -235,9 +236,9 @@ async def test_history_during_period_impossible_conditions( @pytest.mark.parametrize( "time_zone", 
["UTC", "Europe/Berlin", "America/Chicago", "US/Hawaii"] ) +@pytest.mark.usefixtures("recorder_mock") async def test_history_during_period_significant_domain( hass: HomeAssistant, - recorder_mock: Recorder, hass_ws_client: WebSocketGenerator, time_zone, ) -> None: @@ -403,8 +404,9 @@ async def test_history_during_period_significant_domain( assert "lc" not in sensor_test_history[0] # skipped if the same a last_updated (lu) +@pytest.mark.usefixtures("recorder_mock") async def test_history_during_period_bad_start_time( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test history_during_period bad state time.""" await async_setup_component( @@ -427,8 +429,9 @@ async def test_history_during_period_bad_start_time( assert response["error"]["code"] == "invalid_start_time" +@pytest.mark.usefixtures("recorder_mock") async def test_history_during_period_bad_end_time( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test history_during_period bad end time.""" now = dt_util.utcnow() @@ -454,8 +457,9 @@ async def test_history_during_period_bad_end_time( assert response["error"]["code"] == "invalid_end_time" +@pytest.mark.usefixtures("recorder_mock") async def test_history_stream_historical_only( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test history stream.""" now = dt_util.utcnow() @@ -543,8 +547,9 @@ async def test_history_stream_historical_only( } +@pytest.mark.usefixtures("recorder_mock") async def test_history_stream_significant_domain_historical_only( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test the stream with climate domain with historical states only.""" now = dt_util.utcnow() @@ -744,8 +749,9 @@ async def test_history_stream_significant_domain_historical_only( assert "lc" not in sensor_test_history[0] # skipped if the same a last_updated (lu) +@pytest.mark.usefixtures("recorder_mock") async def test_history_stream_bad_start_time( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test history stream bad state time.""" await async_setup_component( @@ -768,8 +774,9 @@ async def test_history_stream_bad_start_time( assert response["error"]["code"] == "invalid_start_time" +@pytest.mark.usefixtures("recorder_mock") async def test_history_stream_end_time_before_start_time( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test history stream with an end_time before the start_time.""" end_time = dt_util.utcnow() - timedelta(seconds=2) @@ -796,8 +803,9 @@ async def test_history_stream_end_time_before_start_time( assert response["error"]["code"] == "invalid_end_time" +@pytest.mark.usefixtures("recorder_mock") async def test_history_stream_bad_end_time( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test history stream bad end time.""" now = dt_util.utcnow() @@ -823,8 +831,9 @@ async def test_history_stream_bad_end_time( assert response["error"]["code"] == 
"invalid_end_time" +@pytest.mark.usefixtures("recorder_mock") async def test_history_stream_live_no_attributes_minimal_response( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test history stream with history and live data and no_attributes and minimal_response.""" now = dt_util.utcnow() @@ -916,8 +925,9 @@ async def test_history_stream_live_no_attributes_minimal_response( } +@pytest.mark.usefixtures("recorder_mock") async def test_history_stream_live( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test history stream with history and live data.""" now = dt_util.utcnow() @@ -1029,8 +1039,9 @@ async def test_history_stream_live( } +@pytest.mark.usefixtures("recorder_mock") async def test_history_stream_live_minimal_response( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test history stream with history and live data and minimal_response.""" now = dt_util.utcnow() @@ -1134,8 +1145,9 @@ async def test_history_stream_live_minimal_response( } +@pytest.mark.usefixtures("recorder_mock") async def test_history_stream_live_no_attributes( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test history stream with history and live data and no_attributes.""" now = dt_util.utcnow() @@ -1235,8 +1247,9 @@ async def test_history_stream_live_no_attributes( } +@pytest.mark.usefixtures("recorder_mock") async def test_history_stream_live_no_attributes_minimal_response_specific_entities( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test history stream with history and live data and no_attributes and minimal_response with specific entities.""" now = dt_util.utcnow() @@ -1329,8 +1342,9 @@ async def test_history_stream_live_no_attributes_minimal_response_specific_entit } +@pytest.mark.usefixtures("recorder_mock") async def test_history_stream_live_with_future_end_time( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test history stream with history and live data with future end time.""" now = dt_util.utcnow() @@ -1438,9 +1452,9 @@ async def test_history_stream_live_with_future_end_time( @pytest.mark.parametrize("include_start_time_state", [True, False]) +@pytest.mark.usefixtures("recorder_mock") async def test_history_stream_before_history_starts( hass: HomeAssistant, - recorder_mock: Recorder, hass_ws_client: WebSocketGenerator, include_start_time_state, ) -> None: @@ -1489,8 +1503,9 @@ async def test_history_stream_before_history_starts( } +@pytest.mark.usefixtures("recorder_mock") async def test_history_stream_for_entity_with_no_possible_changes( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test history stream for future with no possible changes where end time is less than or equal to now.""" await async_setup_component( @@ -1540,8 +1555,9 @@ async def test_history_stream_for_entity_with_no_possible_changes( } +@pytest.mark.usefixtures("recorder_mock") async def 
test_overflow_queue( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test overflowing the history stream queue.""" now = dt_util.utcnow() @@ -1627,8 +1643,9 @@ async def test_overflow_queue( ) == listeners_without_writes(init_listeners) +@pytest.mark.usefixtures("recorder_mock") async def test_history_during_period_for_invalid_entity_ids( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test history_during_period for valid and invalid entity ids.""" now = dt_util.utcnow() @@ -1786,8 +1803,9 @@ async def test_history_during_period_for_invalid_entity_ids( } +@pytest.mark.usefixtures("recorder_mock") async def test_history_stream_for_invalid_entity_ids( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test history stream for invalid and valid entity ids.""" @@ -1964,8 +1982,9 @@ async def test_history_stream_for_invalid_entity_ids( } +@pytest.mark.usefixtures("recorder_mock") async def test_history_stream_historical_only_with_start_time_state_past( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test history stream.""" await async_setup_component( @@ -2075,8 +2094,9 @@ async def test_history_stream_historical_only_with_start_time_state_past( } +@pytest.mark.usefixtures("recorder_mock") async def test_history_stream_live_chained_events( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test history stream with history with a chained event.""" now = dt_util.utcnow() diff --git a/tests/components/history/test_websocket_api_schema_32.py b/tests/components/history/test_websocket_api_schema_32.py index c9577e20fcff36..8e13f44b822838 100644 --- a/tests/components/history/test_websocket_api_schema_32.py +++ b/tests/components/history/test_websocket_api_schema_32.py @@ -5,7 +5,6 @@ import pytest from homeassistant.components import recorder -from homeassistant.components.recorder import Recorder from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util @@ -25,8 +24,9 @@ def db_schema_32(hass: HomeAssistant) -> Generator[None]: yield +@pytest.mark.usefixtures("recorder_mock") async def test_history_during_period( - hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test history_during_period.""" now = dt_util.utcnow() diff --git a/tests/components/homeassistant_connect_zbt2/test_config_flow.py b/tests/components/homeassistant_connect_zbt2/test_config_flow.py index 54f70c57c4900a..62a34bc1d3555a 100644 --- a/tests/components/homeassistant_connect_zbt2/test_config_flow.py +++ b/tests/components/homeassistant_connect_zbt2/test_config_flow.py @@ -1,7 +1,7 @@ """Test the Home Assistant Connect ZBT-2 config flow.""" from collections.abc import Generator -from unittest.mock import AsyncMock, call, patch +from unittest.mock import AsyncMock, Mock, call, patch import pytest @@ -243,23 +243,18 @@ async def test_options_flow( assert description_placeholders["firmware_type"] == "spinel" assert 
description_placeholders["model"] == model - async def mock_install_firmware_step( - self, - fw_update_url: str, - fw_type: str, - firmware_name: str, - expected_installed_firmware_type: ApplicationType, - step_id: str, - next_step_id: str, - ) -> ConfigFlowResult: - self._probed_firmware_info = FirmwareInfo( - device=usb_data.device, - firmware_type=expected_installed_firmware_type, - firmware_version="7.4.4.0 build 0", - owners=[], - source="probe", - ) - return await getattr(self, f"async_step_{next_step_id}")() + mock_update_client = AsyncMock() + mock_manifest = Mock() + mock_firmware = Mock() + mock_firmware.filename = "zbt2_zigbee_ncp_7.4.4.0.gbl" + mock_firmware.metadata = { + "ezsp_version": "7.4.4.0", + "fw_type": "zbt2_zigbee_ncp", + "metadata_version": 2, + } + mock_manifest.firmwares = [mock_firmware] + mock_update_client.async_update_data.return_value = mock_manifest + mock_update_client.async_fetch_firmware.return_value = b"firmware_data" with ( patch( @@ -267,9 +262,42 @@ async def mock_install_firmware_step( return_value=[], ), patch( - "homeassistant.components.homeassistant_hardware.firmware_config_flow.BaseFirmwareOptionsFlow._install_firmware_step", - autospec=True, - side_effect=mock_install_firmware_step, + "homeassistant.components.homeassistant_hardware.firmware_config_flow.FirmwareUpdateClient", + return_value=mock_update_client, + ), + patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.async_flash_silabs_firmware", + return_value=FirmwareInfo( + device=usb_data.device, + firmware_type=ApplicationType.EZSP, + firmware_version="7.4.4.0 build 0", + owners=[], + source="probe", + ), + ) as flash_mock, + patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.probe_silabs_firmware_info", + side_effect=[ + # First call: probe before installation (returns current SPINEL firmware) + FirmwareInfo( + device=usb_data.device, + firmware_type=ApplicationType.SPINEL, + firmware_version="2.4.4.0", + owners=[], + source="probe", + ), + # Second call: probe after installation (returns new EZSP firmware) + FirmwareInfo( + device=usb_data.device, + firmware_type=ApplicationType.EZSP, + firmware_version="7.4.4.0 build 0", + owners=[], + source="probe", + ), + ], + ), + patch( + "homeassistant.components.homeassistant_hardware.util.parse_firmware_image" ), ): pick_result = await hass.config_entries.options.async_configure( @@ -298,6 +326,13 @@ async def mock_install_firmware_step( "vid": usb_data.vid, } + # Verify async_flash_silabs_firmware was called with ZBT-2's reset methods + assert flash_mock.call_count == 1 + assert flash_mock.mock_calls[0].kwargs["bootloader_reset_methods"] == [ + "rts_dtr", + "baudrate", + ] + async def test_duplicate_discovery(hass: HomeAssistant) -> None: """Test config flow unique_id deduplication.""" diff --git a/tests/components/homeassistant_hardware/test_config_flow.py b/tests/components/homeassistant_hardware/test_config_flow.py index 34c6cfb7f8048e..267fa389d91dfc 100644 --- a/tests/components/homeassistant_hardware/test_config_flow.py +++ b/tests/components/homeassistant_hardware/test_config_flow.py @@ -1,7 +1,7 @@ """Test the Home Assistant hardware firmware config flow.""" import asyncio -from collections.abc import AsyncGenerator, Awaitable, Callable, Iterator +from collections.abc import AsyncGenerator, Awaitable, Callable, Iterator, Sequence import contextlib from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, call, patch @@ -25,6 +25,7 @@ from 
homeassistant.components.homeassistant_hardware.util import ( ApplicationType, FirmwareInfo, + ResetTarget, ) from homeassistant.config_entries import ( SOURCE_IGNORE, @@ -299,7 +300,7 @@ async def mock_flash_firmware( device: str, fw_data: bytes, expected_installed_firmware_type: ApplicationType, - bootloader_reset_type: str | None = None, + bootloader_reset_methods: Sequence[ResetTarget] = (), progress_callback: Callable[[int, int], None] | None = None, ) -> FirmwareInfo: await asyncio.sleep(0) diff --git a/tests/components/homeassistant_hardware/test_update.py b/tests/components/homeassistant_hardware/test_update.py index 3103e5cfc6aa85..5f99d64c1b1bf9 100644 --- a/tests/components/homeassistant_hardware/test_update.py +++ b/tests/components/homeassistant_hardware/test_update.py @@ -3,7 +3,7 @@ from __future__ import annotations import asyncio -from collections.abc import AsyncGenerator, Callable +from collections.abc import AsyncGenerator, Callable, Sequence import dataclasses import logging from unittest.mock import Mock, patch @@ -29,6 +29,7 @@ ApplicationType, FirmwareInfo, OwningIntegration, + ResetTarget, ) from homeassistant.components.update import UpdateDeviceClass from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigFlow @@ -197,7 +198,7 @@ async def mock_async_setup_update_entities( class MockFirmwareUpdateEntity(BaseFirmwareUpdateEntity): """Mock SkyConnect firmware update entity.""" - bootloader_reset_type = None + bootloader_reset_methods = [] def __init__( self, @@ -361,7 +362,7 @@ async def mock_flash_firmware( device: str, fw_data: bytes, expected_installed_firmware_type: ApplicationType, - bootloader_reset_type: str | None = None, + bootloader_reset_methods: Sequence[ResetTarget] = (), progress_callback: Callable[[int, int], None] | None = None, ) -> FirmwareInfo: await asyncio.sleep(0) diff --git a/tests/components/homeassistant_hardware/test_util.py b/tests/components/homeassistant_hardware/test_util.py index 048bf998d1317c..e9c20ffb8d6037 100644 --- a/tests/components/homeassistant_hardware/test_util.py +++ b/tests/components/homeassistant_hardware/test_util.py @@ -580,7 +580,7 @@ async def mock_flash_firmware( patch( "homeassistant.components.homeassistant_hardware.util.Flasher", return_value=mock_flasher, - ), + ) as flasher_mock, patch( "homeassistant.components.homeassistant_hardware.util.parse_firmware_image" ), @@ -594,13 +594,17 @@ async def mock_flash_firmware( device="/dev/ttyUSB0", fw_data=b"firmware contents", expected_installed_firmware_type=ApplicationType.SPINEL, - bootloader_reset_type=None, + bootloader_reset_methods=(), progress_callback=progress_callback, ) assert progress_callback.mock_calls == [call(0, 100), call(50, 100), call(100, 100)] assert after_flash_info == expected_firmware_info + # Verify Flasher was called with correct bootloader_reset parameter + assert flasher_mock.call_count == 1 + assert flasher_mock.mock_calls[0].kwargs["bootloader_reset"] == () + # Both owning integrations/addons are stopped and restarted assert owner1.temporarily_stop.mock_calls == [ call(hass), @@ -653,7 +657,7 @@ async def test_async_flash_silabs_firmware_flash_failure(hass: HomeAssistant) -> device="/dev/ttyUSB0", fw_data=b"firmware contents", expected_installed_firmware_type=ApplicationType.SPINEL, - bootloader_reset_type=None, + bootloader_reset_methods=(), ) # Both owning integrations/addons are stopped and restarted @@ -713,7 +717,7 @@ async def test_async_flash_silabs_firmware_probe_failure(hass: HomeAssistant) -> 
device="/dev/ttyUSB0", fw_data=b"firmware contents", expected_installed_firmware_type=ApplicationType.SPINEL, - bootloader_reset_type=None, + bootloader_reset_methods=(), ) # Both owning integrations/addons are stopped and restarted diff --git a/tests/components/homeassistant_yellow/test_config_flow.py b/tests/components/homeassistant_yellow/test_config_flow.py index 3a85ed017cb446..0cb1b2ab3f4d7a 100644 --- a/tests/components/homeassistant_yellow/test_config_flow.py +++ b/tests/components/homeassistant_yellow/test_config_flow.py @@ -353,23 +353,18 @@ async def test_firmware_options_flow_zigbee(hass: HomeAssistant) -> None: assert description_placeholders["firmware_type"] == "spinel" assert description_placeholders["model"] == "Home Assistant Yellow" - async def mock_install_firmware_step( - self, - fw_update_url: str, - fw_type: str, - firmware_name: str, - expected_installed_firmware_type: ApplicationType, - step_id: str, - next_step_id: str, - ) -> ConfigFlowResult: - self._probed_firmware_info = FirmwareInfo( - device=RADIO_DEVICE, - firmware_type=expected_installed_firmware_type, - firmware_version=fw_version, - owners=[], - source="probe", - ) - return await getattr(self, f"async_step_{next_step_id}")() + mock_update_client = AsyncMock() + mock_manifest = Mock() + mock_firmware = Mock() + mock_firmware.filename = "yellow_zigbee_ncp_7.4.4.0.gbl" + mock_firmware.metadata = { + "ezsp_version": "7.4.4.0", + "fw_type": "yellow_zigbee_ncp", + "metadata_version": 2, + } + mock_manifest.firmwares = [mock_firmware] + mock_update_client.async_update_data.return_value = mock_manifest + mock_update_client.async_fetch_firmware.return_value = b"firmware_data" with ( patch( @@ -377,9 +372,42 @@ async def mock_install_firmware_step( return_value=[], ), patch( - "homeassistant.components.homeassistant_hardware.firmware_config_flow.BaseFirmwareInstallFlow._install_firmware_step", - autospec=True, - side_effect=mock_install_firmware_step, + "homeassistant.components.homeassistant_hardware.firmware_config_flow.FirmwareUpdateClient", + return_value=mock_update_client, + ), + patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.async_flash_silabs_firmware", + return_value=FirmwareInfo( + device=RADIO_DEVICE, + firmware_type=fw_type, + firmware_version=fw_version, + owners=[], + source="probe", + ), + ) as flash_mock, + patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.probe_silabs_firmware_info", + side_effect=[ + # First call: probe before installation (returns current SPINEL firmware) + FirmwareInfo( + device=RADIO_DEVICE, + firmware_type=ApplicationType.SPINEL, + firmware_version="2.4.4.0", + owners=[], + source="probe", + ), + # Second call: probe after installation (returns new EZSP firmware) + FirmwareInfo( + device=RADIO_DEVICE, + firmware_type=fw_type, + firmware_version=fw_version, + owners=[], + source="probe", + ), + ], + ), + patch( + "homeassistant.components.homeassistant_hardware.util.parse_firmware_image" ), ): pick_result = await hass.config_entries.options.async_configure( @@ -402,6 +430,10 @@ async def mock_install_firmware_step( "firmware_version": fw_version, } + # Verify async_flash_silabs_firmware was called with Yellow's reset method + assert flash_mock.call_count == 1 + assert flash_mock.mock_calls[0].kwargs["bootloader_reset_methods"] == ["yellow"] + @pytest.mark.usefixtures("addon_installed") async def test_firmware_options_flow_thread( diff --git a/tests/components/recorder/auto_repairs/test_schema.py 
b/tests/components/recorder/auto_repairs/test_schema.py index bf2a925df17b09..55b034197675aa 100644 --- a/tests/components/recorder/auto_repairs/test_schema.py +++ b/tests/components/recorder/auto_repairs/test_schema.py @@ -30,9 +30,9 @@ async def mock_recorder_before_hass( @pytest.mark.parametrize("enable_schema_validation", [True]) @pytest.mark.parametrize("db_engine", ["mysql", "postgresql"]) +@pytest.mark.usefixtures("recorder_mock") async def test_validate_db_schema( hass: HomeAssistant, - recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, diff --git a/tests/components/recorder/table_managers/test_recorder_runs.py b/tests/components/recorder/table_managers/test_recorder_runs.py index e79def01badb7a..3567b57750f5aa 100644 --- a/tests/components/recorder/table_managers/test_recorder_runs.py +++ b/tests/components/recorder/table_managers/test_recorder_runs.py @@ -3,8 +3,9 @@ from datetime import timedelta from unittest.mock import patch +import pytest + from homeassistant.components import recorder -from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.db_schema import RecorderRuns from homeassistant.components.recorder.models import process_timestamp from homeassistant.core import HomeAssistant @@ -13,7 +14,8 @@ from tests.typing import RecorderInstanceGenerator -async def test_run_history(recorder_mock: Recorder, hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_run_history(hass: HomeAssistant) -> None: """Test the run history gives the correct run.""" instance = recorder.get_instance(hass) now = dt_util.utcnow() diff --git a/tests/components/recorder/test_backup.py b/tests/components/recorder/test_backup.py index a4362b1fa4c076..22db04c5076f21 100644 --- a/tests/components/recorder/test_backup.py +++ b/tests/components/recorder/test_backup.py @@ -5,13 +5,13 @@ import pytest -from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.backup import async_post_backup, async_pre_backup from homeassistant.core import CoreState, HomeAssistant from homeassistant.exceptions import HomeAssistantError -async def test_async_pre_backup(recorder_mock: Recorder, hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_async_pre_backup(hass: HomeAssistant) -> None: """Test pre backup.""" with patch( "homeassistant.components.recorder.core.Recorder.lock_database" @@ -36,8 +36,8 @@ async def test_async_pre_backup(recorder_mock: Recorder, hass: HomeAssistant) -> (CoreState.stopping, RAISES_HASS_NOT_RUNNING, 0), ], ) +@pytest.mark.usefixtures("recorder_mock") async def test_async_pre_backup_core_state( - recorder_mock: Recorder, hass: HomeAssistant, core_state: CoreState, expected_result: AbstractContextManager, @@ -55,9 +55,8 @@ async def test_async_pre_backup_core_state( assert len(lock_mock.mock_calls) == lock_calls -async def test_async_pre_backup_with_timeout( - recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_async_pre_backup_with_timeout(hass: HomeAssistant) -> None: """Test pre backup with timeout.""" with ( patch( @@ -70,9 +69,8 @@ async def test_async_pre_backup_with_timeout( assert lock_mock.called -async def test_async_pre_backup_with_migration( - recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_async_pre_backup_with_migration(hass: HomeAssistant) -> None: """Test 
pre backup with migration.""" with ( patch( @@ -88,7 +86,8 @@ async def test_async_pre_backup_with_migration( assert not lock_mock.called -async def test_async_post_backup(recorder_mock: Recorder, hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_async_post_backup(hass: HomeAssistant) -> None: """Test post backup.""" with patch( "homeassistant.components.recorder.core.Recorder.unlock_database" @@ -97,9 +96,8 @@ async def test_async_post_backup(recorder_mock: Recorder, hass: HomeAssistant) - assert unlock_mock.called -async def test_async_post_backup_failure( - recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_async_post_backup_failure(hass: HomeAssistant) -> None: """Test post backup failure.""" with ( patch( diff --git a/tests/components/recorder/test_filters_with_entityfilter.py b/tests/components/recorder/test_filters_with_entityfilter.py index 97839803619148..421039bcbb1bd6 100644 --- a/tests/components/recorder/test_filters_with_entityfilter.py +++ b/tests/components/recorder/test_filters_with_entityfilter.py @@ -2,10 +2,11 @@ import json +import pytest from sqlalchemy import select from sqlalchemy.engine.row import Row -from homeassistant.components.recorder import Recorder, get_instance +from homeassistant.components.recorder import get_instance from homeassistant.components.recorder.db_schema import EventData, Events, StatesMeta from homeassistant.components.recorder.filters import ( Filters, @@ -75,8 +76,9 @@ def _get_events_with_session(): return filtered_states_entity_ids, filtered_events_entity_ids +@pytest.mark.usefixtures("recorder_mock") async def test_included_and_excluded_simple_case_no_domains( - recorder_mock: Recorder, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test filters with included and excluded without domains.""" filter_accept = {"sensor.kitchen4", "switch.kitchen"} @@ -133,9 +135,8 @@ async def test_included_and_excluded_simple_case_no_domains( assert not filtered_events_entity_ids.intersection(filter_reject) -async def test_included_and_excluded_simple_case_no_globs( - recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_included_and_excluded_simple_case_no_globs(hass: HomeAssistant) -> None: """Test filters with included and excluded without globs.""" filter_accept = {"switch.bla", "sensor.blu", "sensor.keep"} filter_reject = {"sensor.bli"} @@ -175,8 +176,9 @@ async def test_included_and_excluded_simple_case_no_globs( assert not filtered_events_entity_ids.intersection(filter_reject) +@pytest.mark.usefixtures("recorder_mock") async def test_included_and_excluded_simple_case_without_underscores( - recorder_mock: Recorder, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test filters with included and excluded without underscores.""" filter_accept = {"light.any", "sensor.kitchen4", "switch.kitchen"} @@ -229,8 +231,9 @@ async def test_included_and_excluded_simple_case_without_underscores( assert not filtered_events_entity_ids.intersection(filter_reject) +@pytest.mark.usefixtures("recorder_mock") async def test_included_and_excluded_simple_case_with_underscores( - recorder_mock: Recorder, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test filters with included and excluded with underscores.""" filter_accept = {"light.any", "sensor.kitchen_4", "switch.kitchen"} @@ -283,9 +286,8 @@ async def test_included_and_excluded_simple_case_with_underscores( assert not 
filtered_events_entity_ids.intersection(filter_reject) -async def test_included_and_excluded_complex_case( - recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_included_and_excluded_complex_case(hass: HomeAssistant) -> None: """Test filters with included and excluded with a complex filter.""" filter_accept = {"light.any", "sensor.kitchen_4", "switch.kitchen"} filter_reject = { @@ -342,9 +344,8 @@ async def test_included_and_excluded_complex_case( assert not filtered_events_entity_ids.intersection(filter_reject) -async def test_included_entities_and_excluded_domain( - recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_included_entities_and_excluded_domain(hass: HomeAssistant) -> None: """Test filters with included entities and excluded domain.""" filter_accept = { "media_player.test", @@ -390,9 +391,8 @@ async def test_included_entities_and_excluded_domain( assert not filtered_events_entity_ids.intersection(filter_reject) -async def test_same_domain_included_excluded( - recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_same_domain_included_excluded(hass: HomeAssistant) -> None: """Test filters with the same domain included and excluded.""" filter_accept = { "media_player.test", @@ -438,9 +438,8 @@ async def test_same_domain_included_excluded( assert not filtered_events_entity_ids.intersection(filter_reject) -async def test_same_entity_included_excluded( - recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_same_entity_included_excluded(hass: HomeAssistant) -> None: """Test filters with the same entity included and excluded.""" filter_accept = { "media_player.test", @@ -486,8 +485,9 @@ async def test_same_entity_included_excluded( assert not filtered_events_entity_ids.intersection(filter_reject) +@pytest.mark.usefixtures("recorder_mock") async def test_same_entity_included_excluded_include_domain_wins( - recorder_mock: Recorder, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test filters with domain and entities and the include domain wins.""" filter_accept = { @@ -536,9 +536,8 @@ async def test_same_entity_included_excluded_include_domain_wins( assert not filtered_events_entity_ids.intersection(filter_reject) -async def test_specificly_included_entity_always_wins( - recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_specificly_included_entity_always_wins(hass: HomeAssistant) -> None: """Test specifically included entity always wins.""" filter_accept = { "media_player.test2", @@ -586,8 +585,9 @@ async def test_specificly_included_entity_always_wins( assert not filtered_events_entity_ids.intersection(filter_reject) +@pytest.mark.usefixtures("recorder_mock") async def test_specificly_included_entity_always_wins_over_glob( - recorder_mock: Recorder, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test specifically included entity always wins over a glob.""" filter_accept = { diff --git a/tests/components/recorder/test_filters_with_entityfilter_schema_37.py b/tests/components/recorder/test_filters_with_entityfilter_schema_37.py index 2e9883aaf53668..aa0dcddcf9d0aa 100644 --- a/tests/components/recorder/test_filters_with_entityfilter_schema_37.py +++ b/tests/components/recorder/test_filters_with_entityfilter_schema_37.py @@ -104,8 +104,9 @@ def 
_get_events_with_session(): return filtered_states_entity_ids, filtered_events_entity_ids +@pytest.mark.usefixtures("legacy_recorder_mock") async def test_included_and_excluded_simple_case_no_domains( - legacy_recorder_mock: Recorder, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test filters with included and excluded without domains.""" filter_accept = {"sensor.kitchen4", "switch.kitchen"} @@ -162,9 +163,8 @@ async def test_included_and_excluded_simple_case_no_domains( assert not filtered_events_entity_ids.intersection(filter_reject) -async def test_included_and_excluded_simple_case_no_globs( - legacy_recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("legacy_recorder_mock") +async def test_included_and_excluded_simple_case_no_globs(hass: HomeAssistant) -> None: """Test filters with included and excluded without globs.""" filter_accept = {"switch.bla", "sensor.blu", "sensor.keep"} filter_reject = {"sensor.bli"} @@ -204,8 +204,9 @@ async def test_included_and_excluded_simple_case_no_globs( assert not filtered_events_entity_ids.intersection(filter_reject) +@pytest.mark.usefixtures("legacy_recorder_mock") async def test_included_and_excluded_simple_case_without_underscores( - legacy_recorder_mock: Recorder, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test filters with included and excluded without underscores.""" filter_accept = {"light.any", "sensor.kitchen4", "switch.kitchen"} @@ -258,8 +259,9 @@ async def test_included_and_excluded_simple_case_without_underscores( assert not filtered_events_entity_ids.intersection(filter_reject) +@pytest.mark.usefixtures("legacy_recorder_mock") async def test_included_and_excluded_simple_case_with_underscores( - legacy_recorder_mock: Recorder, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test filters with included and excluded with underscores.""" filter_accept = {"light.any", "sensor.kitchen_4", "switch.kitchen"} @@ -312,9 +314,8 @@ async def test_included_and_excluded_simple_case_with_underscores( assert not filtered_events_entity_ids.intersection(filter_reject) -async def test_included_and_excluded_complex_case( - legacy_recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("legacy_recorder_mock") +async def test_included_and_excluded_complex_case(hass: HomeAssistant) -> None: """Test filters with included and excluded with a complex filter.""" filter_accept = {"light.any", "sensor.kitchen_4", "switch.kitchen"} filter_reject = { @@ -371,9 +372,8 @@ async def test_included_and_excluded_complex_case( assert not filtered_events_entity_ids.intersection(filter_reject) -async def test_included_entities_and_excluded_domain( - legacy_recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("legacy_recorder_mock") +async def test_included_entities_and_excluded_domain(hass: HomeAssistant) -> None: """Test filters with included entities and excluded domain.""" filter_accept = { "media_player.test", @@ -419,9 +419,8 @@ async def test_included_entities_and_excluded_domain( assert not filtered_events_entity_ids.intersection(filter_reject) -async def test_same_domain_included_excluded( - legacy_recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("legacy_recorder_mock") +async def test_same_domain_included_excluded(hass: HomeAssistant) -> None: """Test filters with the same domain included and excluded.""" filter_accept = { "media_player.test", @@ -467,9 +466,8 @@ async def test_same_domain_included_excluded( assert 
not filtered_events_entity_ids.intersection(filter_reject) -async def test_same_entity_included_excluded( - legacy_recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("legacy_recorder_mock") +async def test_same_entity_included_excluded(hass: HomeAssistant) -> None: """Test filters with the same entity included and excluded.""" filter_accept = { "media_player.test", @@ -515,8 +513,9 @@ async def test_same_entity_included_excluded( assert not filtered_events_entity_ids.intersection(filter_reject) +@pytest.mark.usefixtures("legacy_recorder_mock") async def test_same_entity_included_excluded_include_domain_wins( - legacy_recorder_mock: Recorder, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test filters with domain and entities and the include domain wins.""" filter_accept = { @@ -565,9 +564,8 @@ async def test_same_entity_included_excluded_include_domain_wins( assert not filtered_events_entity_ids.intersection(filter_reject) -async def test_specificly_included_entity_always_wins( - legacy_recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("legacy_recorder_mock") +async def test_specificly_included_entity_always_wins(hass: HomeAssistant) -> None: """Test specifically included entity always wins.""" filter_accept = { "media_player.test2", @@ -615,8 +613,9 @@ async def test_specificly_included_entity_always_wins( assert not filtered_events_entity_ids.intersection(filter_reject) +@pytest.mark.usefixtures("legacy_recorder_mock") async def test_specificly_included_entity_always_wins_over_glob( - legacy_recorder_mock: Recorder, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test specifically included entity always wins over a glob.""" filter_accept = { diff --git a/tests/components/recorder/test_init.py b/tests/components/recorder/test_init.py index 2023e15176fc2a..f00ed177807dad 100644 --- a/tests/components/recorder/test_init.py +++ b/tests/components/recorder/test_init.py @@ -1686,12 +1686,11 @@ class CannotSerializeMe: @pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.usefixtures("recorder_mock", "skip_by_db_engine") @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.parametrize("recorder_config", [{CONF_COMMIT_INTERVAL: 0}]) async def test_database_corruption_while_running( hass: HomeAssistant, - recorder_mock: Recorder, recorder_db_url: str, caplog: pytest.LogCaptureFixture, ) -> None: diff --git a/tests/components/recorder/test_statistics.py b/tests/components/recorder/test_statistics.py index 40baffa7b3ef4f..d29ee04a46980e 100644 --- a/tests/components/recorder/test_statistics.py +++ b/tests/components/recorder/test_statistics.py @@ -847,8 +847,8 @@ async def test_statistics_duplicated( ("recorder", "sensor.total_energy_import", async_import_statistics), ], ) +@pytest.mark.usefixtures("recorder_mock") async def test_import_statistics( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, caplog: pytest.LogCaptureFixture, diff --git a/tests/components/recorder/test_system_health.py b/tests/components/recorder/test_system_health.py index 0efaa82e5e5f90..845b95df256bbd 100644 --- a/tests/components/recorder/test_system_health.py +++ b/tests/components/recorder/test_system_health.py @@ -4,7 +4,7 @@ import pytest -from homeassistant.components.recorder import Recorder, get_instance +from homeassistant.components.recorder import get_instance from homeassistant.components.recorder.const import 
SupportedDialect from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -16,9 +16,9 @@ @pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) -@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.usefixtures("skip_by_db_engine", "recorder_mock") async def test_recorder_system_health( - recorder_mock: Recorder, hass: HomeAssistant, recorder_db_url: str + hass: HomeAssistant, recorder_db_url: str ) -> None: """Test recorder system health. @@ -41,8 +41,8 @@ async def test_recorder_system_health( @pytest.mark.parametrize( "db_engine", [SupportedDialect.MYSQL, SupportedDialect.POSTGRESQL] ) +@pytest.mark.usefixtures("recorder_mock") async def test_recorder_system_health_alternate_dbms( - recorder_mock: Recorder, hass: HomeAssistant, db_engine: SupportedDialect, recorder_dialect_name: None, @@ -70,8 +70,8 @@ async def test_recorder_system_health_alternate_dbms( @pytest.mark.parametrize( "db_engine", [SupportedDialect.MYSQL, SupportedDialect.POSTGRESQL] ) +@pytest.mark.usefixtures("recorder_mock") async def test_recorder_system_health_db_url_missing_host( - recorder_mock: Recorder, hass: HomeAssistant, db_engine: SupportedDialect, recorder_dialect_name: None, diff --git a/tests/components/recorder/test_websocket_api.py b/tests/components/recorder/test_websocket_api.py index 46ad05f94bd1f1..aa302548517a5f 100644 --- a/tests/components/recorder/test_websocket_api.py +++ b/tests/components/recorder/test_websocket_api.py @@ -178,8 +178,9 @@ def test_converters_align_with_sensor() -> None: assert any(c for c in UNIT_CONVERTERS.values() if unit_class == c.UNIT_CLASS) +@pytest.mark.usefixtures("recorder_mock") async def test_statistics_during_period( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test statistics_during_period.""" now = get_start_time(dt_util.utcnow()) @@ -1067,8 +1068,9 @@ async def test_statistic_during_period_circular_mean( @pytest.mark.freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.UTC)) +@pytest.mark.usefixtures("recorder_mock") async def test_statistic_during_period_hole( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test statistic_during_period when there are holes in the data.""" now = dt_util.utcnow() @@ -1377,8 +1379,8 @@ async def test_statistic_during_period_hole_circular_mean( datetime.datetime(2022, 10, 21, 7, 31, tzinfo=datetime.UTC), ], ) +@pytest.mark.usefixtures("recorder_mock") async def test_statistic_during_period_partial_overlap( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, freezer: FrozenDateTimeFactory, @@ -1774,8 +1776,8 @@ async def assert_stat_during_fixed(client, start_time, end_time, expect): ), ], ) +@pytest.mark.usefixtures("recorder_mock") async def test_statistic_during_period_calendar( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, calendar_period, @@ -1830,8 +1832,8 @@ async def test_statistic_during_period_calendar( (VOLUME_SENSOR_M3_ATTRIBUTES, 10, 10, {"volume": "ft³"}, 353.14666), ], ) +@pytest.mark.usefixtures("recorder_mock") async def test_statistics_during_period_unit_conversion( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, attributes, @@ -1917,8 +1919,8 @@ async def test_statistics_during_period_unit_conversion( 
(VOLUME_SENSOR_M3_ATTRIBUTES_TOTAL, 10, 10, {"volume": "ft³"}, 353.147), ], ) +@pytest.mark.usefixtures("recorder_mock") async def test_sum_statistics_during_period_unit_conversion( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, attributes, @@ -2007,8 +2009,8 @@ async def test_sum_statistics_during_period_unit_conversion( {"volume": "kWh"}, ], ) +@pytest.mark.usefixtures("recorder_mock") async def test_statistics_during_period_invalid_unit_conversion( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, custom_units, @@ -2049,8 +2051,9 @@ async def test_statistics_during_period_invalid_unit_conversion( assert response["error"]["code"] == "invalid_format" +@pytest.mark.usefixtures("recorder_mock") async def test_statistics_during_period_in_the_past( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test statistics_during_period in the past.""" await hass.config.async_set_time_zone("UTC") @@ -2161,8 +2164,9 @@ async def test_statistics_during_period_in_the_past( assert response["result"] == {} +@pytest.mark.usefixtures("recorder_mock") async def test_statistics_during_period_bad_start_time( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass_ws_client: WebSocketGenerator, ) -> None: """Test statistics_during_period.""" client = await hass_ws_client() @@ -2179,8 +2183,9 @@ async def test_statistics_during_period_bad_start_time( assert response["error"]["code"] == "invalid_start_time" +@pytest.mark.usefixtures("recorder_mock") async def test_statistics_during_period_bad_end_time( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass_ws_client: WebSocketGenerator, ) -> None: """Test statistics_during_period.""" now = dt_util.utcnow() @@ -2200,8 +2205,9 @@ async def test_statistics_during_period_bad_end_time( assert response["error"]["code"] == "invalid_end_time" +@pytest.mark.usefixtures("recorder_mock") async def test_statistics_during_period_no_statistic_ids( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass_ws_client: WebSocketGenerator, ) -> None: """Test statistics_during_period without passing statistic_ids.""" now = dt_util.utcnow() @@ -2220,8 +2226,9 @@ async def test_statistics_during_period_no_statistic_ids( assert response["error"]["code"] == "invalid_format" +@pytest.mark.usefixtures("recorder_mock") async def test_statistics_during_period_empty_statistic_ids( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass_ws_client: WebSocketGenerator, ) -> None: """Test statistics_during_period with passing an empty list of statistic_ids.""" now = dt_util.utcnow() @@ -2300,8 +2307,8 @@ async def test_statistics_during_period_empty_statistic_ids( (METRIC_SYSTEM, VOLUME_SENSOR_FT3_ATTRIBUTES_TOTAL, "ft³", "ft³", "volume"), ], ) +@pytest.mark.usefixtures("recorder_mock") async def test_list_statistic_ids( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, units, @@ -2478,8 +2485,8 @@ async def test_list_statistic_ids( ), ], ) +@pytest.mark.usefixtures("recorder_mock") async def test_list_statistic_ids_unit_change( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, attributes, @@ -2551,9 +2558,8 @@ async def test_list_statistic_ids_unit_change( ] -async def test_validate_statistics( - recorder_mock: Recorder, 
hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_validate_statistics(hass_ws_client: WebSocketGenerator) -> None: """Test validate_statistics can be called.""" async def assert_validation_result(client, expected_result): @@ -2567,9 +2573,8 @@ async def assert_validation_result(client, expected_result): await assert_validation_result(client, {}) -async def test_update_statistics_issues( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_update_statistics_issues(hass_ws_client: WebSocketGenerator) -> None: """Test update_statistics_issues can be called.""" client = await hass_ws_client() @@ -2579,8 +2584,9 @@ async def test_update_statistics_issues( assert response["result"] is None +@pytest.mark.usefixtures("recorder_mock") async def test_clear_statistics( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test removing statistics.""" now = get_start_time(dt_util.utcnow()) @@ -2699,9 +2705,8 @@ async def test_clear_statistics( assert response["result"] == {"sensor.test2": expected_response["sensor.test2"]} -async def test_clear_statistics_time_out( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_clear_statistics_time_out(hass_ws_client: WebSocketGenerator) -> None: """Test removing statistics with time-out error.""" client = await hass_ws_client() @@ -2727,8 +2732,8 @@ async def test_clear_statistics_time_out( ("new_unit", "new_unit_class", "new_display_unit"), [("dogs", None, "dogs"), (None, "unitless", None), ("W", "power", "kW")], ) +@pytest.mark.usefixtures("recorder_mock") async def test_update_statistics_metadata( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, new_unit, @@ -2825,8 +2830,9 @@ async def test_update_statistics_metadata( } +@pytest.mark.usefixtures("recorder_mock") async def test_update_statistics_metadata_time_out( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass_ws_client: WebSocketGenerator, ) -> None: """Test update statistics metadata with time-out error.""" client = await hass_ws_client() @@ -2850,8 +2856,9 @@ async def test_update_statistics_metadata_time_out( } +@pytest.mark.usefixtures("recorder_mock") async def test_change_statistics_unit( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test change unit of recorded statistics.""" now = get_start_time(dt_util.utcnow()) @@ -2997,8 +3004,8 @@ async def test_change_statistics_unit( ] +@pytest.mark.usefixtures("recorder_mock") async def test_change_statistics_unit_errors( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, caplog: pytest.LogCaptureFixture, @@ -3109,8 +3116,9 @@ async def assert_statistics(expected): await assert_statistics(expected_statistics) +@pytest.mark.usefixtures("recorder_mock") async def test_recorder_info( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test getting recorder status.""" client = await hass_ws_client() @@ -3323,8 +3331,8 @@ async def test_backup_start_no_recorder( 
(METRIC_SYSTEM, VOLUME_SENSOR_M3_ATTRIBUTES, "m³", "volume"), ], ) +@pytest.mark.usefixtures("recorder_mock") async def test_get_statistics_metadata( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, units, diff --git a/tests/components/shelly/__init__.py b/tests/components/shelly/__init__.py index 69a7e266dcab67..30ae74079f04e0 100644 --- a/tests/components/shelly/__init__.py +++ b/tests/components/shelly/__init__.py @@ -1,6 +1,6 @@ """Tests for the Shelly integration.""" -from collections.abc import Mapping +from collections.abc import Mapping, Sequence from copy import deepcopy from datetime import timedelta from typing import Any @@ -10,6 +10,7 @@ from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.shelly.const import ( CONF_GEN, @@ -173,15 +174,27 @@ async def snapshot_device_entities( config_entry_id: str, ) -> None: """Snapshot all device entities.""" + + def sort_event_types(data: Any, path: Sequence[tuple[str, Any]]) -> Any: + """Sort the event_types list for event entity.""" + if path and path[-1][0] == "event_types" and isinstance(data, list): + return sorted(data) + + return data + entity_entries = er.async_entries_for_config_entry(entity_registry, config_entry_id) assert entity_entries for entity_entry in entity_entries: - assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") + assert entity_entry == snapshot( + name=f"{entity_entry.entity_id}-entry", exclude=props("event_types") + ) assert entity_entry.disabled_by is None, "Please enable all entities." state = hass.states.get(entity_entry.entity_id) assert state, f"State not found for {entity_entry.entity_id}" - assert state == snapshot(name=f"{entity_entry.entity_id}-state") + assert state == snapshot( + name=f"{entity_entry.entity_id}-state", matcher=sort_event_types + ) async def force_uptime_value( diff --git a/tests/components/shelly/fixtures/wall_display_xl.json b/tests/components/shelly/fixtures/wall_display_xl.json new file mode 100644 index 00000000000000..6b6112202589cf --- /dev/null +++ b/tests/components/shelly/fixtures/wall_display_xl.json @@ -0,0 +1,307 @@ +{ + "config": { + "ble": { + "enable": false, + "keep_running": true, + "rpc": { + "enable": true + }, + "observer": { + "enable": false + } + }, + "wifi": { + "sta": { + "enable": true, + "ssid": "Wifi-Network-Name", + "roam_interval": 900, + "is_open": false, + "ipv4mode": "dhcp", + "ip": "192.168.2.81", + "netmask": "255.255.255.0", + "gw": "192.168.2.1", + "nameserver": "192.168.2.1" + } + }, + "switch:0": { + "in_mode": "detached", + "id": 0, + "auto_off": false, + "auto_on_delay": 0, + "initial_state": "off", + "name": null + }, + "input:0": { + "type": "button", + "id": 0, + "invert": false, + "factory_reset": true, + "name": null + }, + "input:1": { + "id": 1, + "type": "switch", + "invert": false, + "factory_reset": true, + "name": null + }, + "input:2": { + "id": 2, + "type": "switch", + "invert": false, + "factory_reset": true, + "name": null + }, + "input:3": { + "id": 3, + "type": "button", + "invert": false, + "factory_reset": true, + "name": null + }, + "input:4": { + "id": 4, + "type": "button", + "invert": false, + "factory_reset": true, + "name": null + }, + "temperature:0": { + "id": 0, + "report_thr_C": 1, + "offset_C": 0, + "name": null + }, + "humidity:0": { + "id": 0, + "report_thr": 1, + "offset": 0, + "name": null + }, + "illuminance:0": { + "id": 0, + 
"bright_thr": 200, + "dark_thr": 30, + "name": null + }, + "ui": { + "lock_type": "none", + "disable_gestures_when_locked": false, + "use_F": false, + "screen_saver": { + "enable": false, + "timeout": 20, + "priority_element": "CLOCK" + }, + "screen_off_when_idle": false, + "brightness": { + "auto": true, + "level": 70, + "auto_off": { + "enable": false, + "by_lux": false + } + }, + "relay_state_overlay": { + "enable": true, + "always_visible": false + } + }, + "sys": { + "cfg_rev": 50, + "device": { + "fw_id": "20250923-131544/2.4.4-5c68f1d6", + "mac": "AABBCCDDEEFF", + "discoverable": false, + "name": null + }, + "location": { + "tz": "Europe/Brussels", + "lat": 99.8888, + "lon": 22.3333 + }, + "sntp": { + "server": "time.google.com" + }, + "debug": { + "websocket": { + "enable": false + }, + "mqtt": { + "enable": false + }, + "logs": { + "Generic": true, + "Bluetooth": true, + "Cloud": true, + "Interface": true, + "Media": true, + "MQTT": true, + "Network": true, + "RPC": true, + "Thermostat": true, + "Screen": true, + "ShellySmartControl": true, + "Webhooks": true, + "WebSocket": true + } + }, + "media_player_enabled": true + }, + "cloud": { + "server": "shelly-105-eu.shelly.cloud:6022/jrpc", + "enable": true + }, + "mqtt": { + "enable": false, + "client_id": "ShellyWallDisplay-AABBCCDDEEFF", + "topic_prefix": "ShellyWallDisplay-AABBCCDDEEFF" + }, + "ws": { + "enable": false, + "ssl_ca": "ca.pem" + }, + "media": { + "rev": 0 + } + }, + "shelly": { + "id": "ShellyWallDisplay-AABBCCDDEEFF", + "mac": "AABBCCDDEEFF", + "model": "SAWD-3A1XE10EU2", + "gen": 2, + "fw_id": "20250923-131544/2.4.4-5c68f1d6", + "ver": "2.4.4", + "app": "WallDisplayV2", + "auth_en": false, + "uptime": 930619, + "app_uptime": 61029, + "ram_size": 268435456, + "ram_free": 50023040, + "fs_size": 24480665600, + "fs_free": 24071430144, + "discoverable": false, + "cfg_rev": 50, + "schedule_rev": 0, + "webhook_rev": 22, + "platform": "vBlake.a21b392", + "serial": "ABCDFE5674", + "batch_id": "3d35b", + "batch_date": 250715, + "available_updates": {}, + "restart_required": false, + "unixtime": 1759216204, + "relay_in_thermostat": false, + "sensor_in_thermostat": false, + "awaiting_auth_code": false, + "ch": ["switch:0"] + }, + "status": { + "ble": {}, + "cloud": { + "connected": true + }, + "mqtt": { + "connected": false + }, + "temperature:0": { + "id": 0, + "tC": -275.1499938964844, + "tF": -463.2, + "errors": ["Sensor driver missing from firmware"] + }, + "humidity:0": { + "id": 0, + "rh": -2, + "errors": ["Sensor driver missing from firmware"] + }, + "illuminance:0": { + "id": 0, + "lux": 120, + "illumination": "twilight" + }, + "switch:0": { + "id": 0, + "output": true, + "source": "RPC Set" + }, + "input:0": { + "id": 0, + "state": false + }, + "input:1": { + "id": 1 + }, + "input:2": { + "id": 2, + "state": true + }, + "input:3": { + "id": 3 + }, + "input:4": { + "id": 4 + }, + "sys": { + "id": "ShellyWallDisplay-AABBCCDDEEFF", + "mac": "AABBCCDDEEFF", + "model": "SAWD-3A1XE10EU2", + "gen": 2, + "fw_id": "20250923-131544/2.4.4-5c68f1d6", + "ver": "2.4.4", + "app": "WallDisplayV2", + "auth_en": false, + "uptime": 930619, + "app_uptime": 61029, + "ram_size": 268435456, + "ram_free": 50023040, + "fs_size": 24480665600, + "fs_free": 24071430144, + "discoverable": false, + "cfg_rev": 50, + "schedule_rev": 0, + "webhook_rev": 22, + "platform": "vBlake.a21b392", + "serial": "SAWD9570149AV", + "batch_id": "3d35b", + "batch_date": 250715, + "available_updates": {}, + "restart_required": false, + "unixtime": 1759216205, + 
"relay_in_thermostat": false, + "sensor_in_thermostat": false, + "awaiting_auth_code": false, + "ch": ["switch:0"] + }, + "wifi": { + "sta_ip": "192.168.2.81", + "status": "got ip", + "mac": "00:A9:0B:70:14:9A", + "ssid": "Wifi-Network-Name", + "rssi": -48, + "netmask": "255.255.255.0", + "gw": "192.168.2.1", + "nameserver": "192.168.2.1" + }, + "media": { + "playback": { + "enable": false, + "buffering": false, + "volume": 7 + }, + "total_size": 3885854, + "total_size_h": "3.706 MB", + "item_counts": { + "audio": 0, + "photo": 0, + "video": 0 + } + }, + "devicepower:0": { + "id": 0, + "external": { + "present": true + } + } + } +} diff --git a/tests/components/shelly/snapshots/test_devices.ambr b/tests/components/shelly/snapshots/test_devices.ambr index 47c952258d5100..65ce2cde2b0138 100644 --- a/tests/components/shelly/snapshots/test_devices.ambr +++ b/tests/components/shelly/snapshots/test_devices.ambr @@ -5043,3 +5043,922 @@ 'state': 'off', }) # --- +# name: test_wall_display_xl[binary_sensor.test_name_cloud-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_name_cloud', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cloud', + 'platform': 'shelly', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456789ABC-cloud-cloud', + 'unit_of_measurement': None, + }) +# --- +# name: test_wall_display_xl[binary_sensor.test_name_cloud-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Test name Cloud', + }), + 'context': , + 'entity_id': 'binary_sensor.test_name_cloud', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_wall_display_xl[binary_sensor.test_name_external_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_name_external_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'External power', + 'platform': 'shelly', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456789ABC-devicepower:0-external_power', + 'unit_of_measurement': None, + }) +# --- +# name: test_wall_display_xl[binary_sensor.test_name_external_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Test name External power', + }), + 'context': , + 'entity_id': 'binary_sensor.test_name_external_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_wall_display_xl[binary_sensor.test_name_input_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_name_input_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Input 2', + 'platform': 'shelly', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456789ABC-input:2-input', + 'unit_of_measurement': None, + }) +# --- +# name: test_wall_display_xl[binary_sensor.test_name_input_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Test name Input 2', + }), + 'context': , + 'entity_id': 'binary_sensor.test_name_input_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_wall_display_xl[binary_sensor.test_name_restart_required-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_name_restart_required', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restart required', + 'platform': 'shelly', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456789ABC-sys-restart', + 'unit_of_measurement': None, + }) +# --- +# name: test_wall_display_xl[binary_sensor.test_name_restart_required-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test name Restart required', + }), + 'context': , + 'entity_id': 'binary_sensor.test_name_restart_required', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_wall_display_xl[button.test_name_reboot-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.test_name_reboot', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reboot', + 'platform': 'shelly', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456789ABC-reboot', + 'unit_of_measurement': None, + }) +# --- +# name: test_wall_display_xl[button.test_name_reboot-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'Test name Reboot', + }), + 'context': , + 'entity_id': 'button.test_name_reboot', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_wall_display_xl[event.test_name_input_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
}), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.test_name_input_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Input 0', + 'platform': 'shelly', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'input', + 'unique_id': '123456789ABC-input:0', + 'unit_of_measurement': None, + }) +# --- +# name: test_wall_display_xl[event.test_name_input_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'btn_down', + 'btn_up', + 'double_push', + 'long_push', + 'single_push', + 'triple_push', + ]), + 'friendly_name': 'Test name Input 0', + }), + 'context': , + 'entity_id': 'event.test_name_input_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_wall_display_xl[event.test_name_input_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.test_name_input_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Input 3', + 'platform': 'shelly', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'input', + 'unique_id': '123456789ABC-input:3', + 'unit_of_measurement': None, + }) +# --- +# name: test_wall_display_xl[event.test_name_input_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'btn_down', + 'btn_up', + 'double_push', + 'long_push', + 'single_push', + 'triple_push', + ]), + 'friendly_name': 'Test name Input 3', + }), + 'context': , + 'entity_id': 'event.test_name_input_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_wall_display_xl[event.test_name_input_4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.test_name_input_4', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Input 4', + 'platform': 'shelly', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'input', + 'unique_id': '123456789ABC-input:4', + 'unit_of_measurement': None, + }) +# --- +# name: test_wall_display_xl[event.test_name_input_4-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'btn_down', + 'btn_up', + 'double_push', + 'long_push', + 'single_push', + 'triple_push', + ]), + 'friendly_name': 
'Test name Input 4', + }), + 'context': , + 'entity_id': 'event.test_name_input_4', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_wall_display_xl[sensor.test_name_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_name_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity', + 'platform': 'shelly', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456789ABC-humidity:0-humidity_0', + 'unit_of_measurement': '%', + }) +# --- +# name: test_wall_display_xl[sensor.test_name_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Test name Humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_name_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-2', + }) +# --- +# name: test_wall_display_xl[sensor.test_name_illuminance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_name_illuminance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Illuminance', + 'platform': 'shelly', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456789ABC-illuminance:0-illuminance', + 'unit_of_measurement': 'lx', + }) +# --- +# name: test_wall_display_xl[sensor.test_name_illuminance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'illuminance', + 'friendly_name': 'Test name Illuminance', + 'state_class': , + 'unit_of_measurement': 'lx', + }), + 'context': , + 'entity_id': 'sensor.test_name_illuminance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '120', + }) +# --- +# name: test_wall_display_xl[sensor.test_name_illuminance_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'dark', + 'twilight', + 'bright', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_name_illuminance_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Illuminance level', + 'platform': 'shelly', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 
'translation_key': 'illuminance_level', + 'unique_id': '123456789ABC-illuminance:0-illuminance_illumination', + 'unit_of_measurement': None, + }) +# --- +# name: test_wall_display_xl[sensor.test_name_illuminance_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Test name Illuminance level', + 'options': list([ + 'dark', + 'twilight', + 'bright', + ]), + }), + 'context': , + 'entity_id': 'sensor.test_name_illuminance_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'twilight', + }) +# --- +# name: test_wall_display_xl[sensor.test_name_rssi-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_name_rssi', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'RSSI', + 'platform': 'shelly', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456789ABC-wifi-rssi', + 'unit_of_measurement': 'dBm', + }) +# --- +# name: test_wall_display_xl[sensor.test_name_rssi-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'signal_strength', + 'friendly_name': 'Test name RSSI', + 'state_class': , + 'unit_of_measurement': 'dBm', + }), + 'context': , + 'entity_id': 'sensor.test_name_rssi', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-48', + }) +# --- +# name: test_wall_display_xl[sensor.test_name_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_name_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'shelly', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456789ABC-temperature:0-temperature_0', + 'unit_of_measurement': , + }) +# --- +# name: test_wall_display_xl[sensor.test_name_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test name Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_name_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-275.149993896484', + }) +# --- +# name: test_wall_display_xl[sensor.test_name_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_name_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'shelly', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456789ABC-sys-uptime', + 'unit_of_measurement': None, + }) +# --- +# name: test_wall_display_xl[sensor.test_name_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Test name Uptime', + }), + 'context': , + 'entity_id': 'sensor.test_name_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2025-05-15T21:33:41+00:00', + }) +# --- +# name: test_wall_display_xl[switch.test_name-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.test_name', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'shelly', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456789ABC-switch:0', + 'unit_of_measurement': None, + }) +# --- +# name: test_wall_display_xl[switch.test_name-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test name', + }), + 'context': , + 'entity_id': 'switch.test_name', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_wall_display_xl[update.test_name_beta_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.test_name_beta_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Beta firmware', + 'platform': 'shelly', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '123456789ABC-sys-fwupdate_beta', + 'unit_of_measurement': None, + }) +# --- +# name: test_wall_display_xl[update.test_name_beta_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/shelly/icon.png', + 'friendly_name': 'Test name Beta firmware', + 'in_progress': False, + 'installed_version': '2.4.4', + 'latest_version': '2.4.4', + 'release_summary': None, + 'release_url': 'https://shelly-api-docs.shelly.cloud/gen2/changelog/#unreleased', + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.test_name_beta_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_wall_display_xl[update.test_name_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 
'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.test_name_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'shelly', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '123456789ABC-sys-fwupdate', + 'unit_of_measurement': None, + }) +# --- +# name: test_wall_display_xl[update.test_name_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/shelly/icon.png', + 'friendly_name': 'Test name Firmware', + 'in_progress': False, + 'installed_version': '2.4.4', + 'latest_version': '2.4.4', + 'release_summary': None, + 'release_url': 'https://shelly-api-docs.shelly.cloud/gen2/changelog/', + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.test_name_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/shelly/test_devices.py b/tests/components/shelly/test_devices.py index 71eaeb2a333f57..1e2f8088618bb1 100644 --- a/tests/components/shelly/test_devices.py +++ b/tests/components/shelly/test_devices.py @@ -2,7 +2,12 @@ from unittest.mock import Mock -from aioshelly.const import MODEL_2PM_G3, MODEL_BLU_GATEWAY_G3, MODEL_PRO_EM3 +from aioshelly.const import ( + MODEL_2PM_G3, + MODEL_BLU_GATEWAY_G3, + MODEL_PRO_EM3, + MODEL_WALL_DISPLAY_XL, +) from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion @@ -529,3 +534,29 @@ async def test_blu_trv_device_info( assert device_entry.name == "TRV-Name" assert device_entry.model_id == "SBTR-001AEU" assert device_entry.sw_version == "v1.2.10" + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_wall_display_xl( + hass: HomeAssistant, + mock_rpc_device: Mock, + entity_registry: EntityRegistry, + snapshot: SnapshotAssertion, + monkeypatch: pytest.MonkeyPatch, + freezer: FrozenDateTimeFactory, +) -> None: + """Test Wall Display XL.""" + device_fixture = await async_load_json_object_fixture( + hass, "wall_display_xl.json", DOMAIN + ) + monkeypatch.setattr(mock_rpc_device, "shelly", device_fixture["shelly"]) + monkeypatch.setattr(mock_rpc_device, "status", device_fixture["status"]) + monkeypatch.setattr(mock_rpc_device, "config", device_fixture["config"]) + + await force_uptime_value(hass, freezer) + + config_entry = await init_integration(hass, gen=2, model=MODEL_WALL_DISPLAY_XL) + + await snapshot_device_entities( + hass, entity_registry, snapshot, config_entry.entry_id + ) diff --git a/tests/components/shelly/test_utils.py b/tests/components/shelly/test_utils.py index 0cdd1640e65897..ec5bd411ac3843 100644 --- a/tests/components/shelly/test_utils.py +++ b/tests/components/shelly/test_utils.py @@ -34,6 +34,7 @@ get_rpc_channel_name, get_rpc_input_triggers, is_block_momentary_input, + mac_address_from_name, ) from homeassistant.util import dt as dt_util @@ -327,3 +328,17 @@ def test_get_release_url( def test_get_host(host: str, 
expected: str) -> None: """Test get_host function.""" assert get_host(host) == expected + + +@pytest.mark.parametrize( + ("name", "result"), + [ + ("shelly1pm-AABBCCDDEEFF", "AABBCCDDEEFF"), + ("Shelly Plus 1 [DDEEFF]", None), + ("S11-Schlafzimmer", None), + ("22-Kueche-links", None), + ], +) +def test_mac_address_from_name(name: str, result: str | None) -> None: + """Test mac_address_from_name() function.""" + assert mac_address_from_name(name) == result diff --git a/tests/components/switcher_kis/test_services.py b/tests/components/switcher_kis/test_services.py index b4a8168419f494..ab2414b2681f2a 100644 --- a/tests/components/switcher_kis/test_services.py +++ b/tests/components/switcher_kis/test_services.py @@ -16,7 +16,7 @@ ) from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceNotSupported from homeassistant.helpers.config_validation import time_period_str from homeassistant.util import slugify @@ -137,32 +137,26 @@ async def test_plug_unsupported_services( entity_id = f"{SWITCH_DOMAIN}.{slugify(device.name)}" # Turn on with timer - await hass.services.async_call( - DOMAIN, - SERVICE_TURN_ON_WITH_TIMER_NAME, - { - ATTR_ENTITY_ID: entity_id, - CONF_TIMER_MINUTES: DUMMY_TIMER_MINUTES_SET, - }, - blocking=True, - ) + with pytest.raises(ServiceNotSupported): + await hass.services.async_call( + DOMAIN, + SERVICE_TURN_ON_WITH_TIMER_NAME, + { + ATTR_ENTITY_ID: entity_id, + CONF_TIMER_MINUTES: DUMMY_TIMER_MINUTES_SET, + }, + blocking=True, + ) assert mock_api.call_count == 0 - assert ( - f"Service '{SERVICE_TURN_ON_WITH_TIMER_NAME}' is not supported by {device.name}" - in caplog.text - ) # Auto off - await hass.services.async_call( - DOMAIN, - SERVICE_SET_AUTO_OFF_NAME, - {ATTR_ENTITY_ID: entity_id, CONF_AUTO_OFF: DUMMY_AUTO_OFF_SET}, - blocking=True, - ) + with pytest.raises(ServiceNotSupported): + await hass.services.async_call( + DOMAIN, + SERVICE_SET_AUTO_OFF_NAME, + {ATTR_ENTITY_ID: entity_id, CONF_AUTO_OFF: DUMMY_AUTO_OFF_SET}, + blocking=True, + ) assert mock_api.call_count == 0 - assert ( - f"Service '{SERVICE_SET_AUTO_OFF_NAME}' is not supported by {device.name}" - in caplog.text - ) diff --git a/tests/components/tuya/__init__.py b/tests/components/tuya/__init__.py index 1d12b972e7e9de..897050a660388a 100644 --- a/tests/components/tuya/__init__.py +++ b/tests/components/tuya/__init__.py @@ -21,6 +21,7 @@ "cl_cpbo62rn", # https://github.com/orgs/home-assistant/discussions/539 "cl_ebt12ypvexnixvtf", # https://github.com/tuya/tuya-home-assistant/issues/754 "cl_g1cp07dsqnbdbbki", # https://github.com/home-assistant/core/issues/139966 + "cl_lfkr93x0ukp5gaia", # https://github.com/home-assistant/core/issues/152826 "cl_qqdxfdht", # https://github.com/orgs/home-assistant/discussions/539 "cl_rD7uqAAgQOpSA2Rx", # https://github.com/home-assistant/core/issues/139966 "cl_zah67ekd", # https://github.com/home-assistant/core/issues/71242 diff --git a/tests/components/tuya/fixtures/cl_lfkr93x0ukp5gaia.json b/tests/components/tuya/fixtures/cl_lfkr93x0ukp5gaia.json new file mode 100644 index 00000000000000..197c9e9ac517b9 --- /dev/null +++ b/tests/components/tuya/fixtures/cl_lfkr93x0ukp5gaia.json @@ -0,0 +1,138 @@ +{ + "endpoint": "https://apigw.tuyaus.com", + "mqtt_connected": true, + "disabled_by": null, + "disabled_polling": false, + "name": "Projector Screen", + "category": "cl", + "product_id": 
"lfkr93x0ukp5gaia", + "product_name": "VIVIDSTORM SCREEN", + "online": true, + "sub": false, + "time_zone": "-05:00", + "active_time": "2025-05-02T23:54:36+00:00", + "create_time": "2025-05-02T23:54:36+00:00", + "update_time": "2025-05-02T23:54:36+00:00", + "function": { + "control": { + "type": "Enum", + "value": { + "range": ["open", "stop", "close", "continue"] + } + }, + "percent_control": { + "type": "Integer", + "value": { + "unit": "%", + "min": 0, + "max": 100, + "scale": 0, + "step": 1 + } + }, + "control_back_mode": { + "type": "Enum", + "value": { + "range": ["forward", "back"] + } + }, + "border": { + "type": "Enum", + "value": { + "range": ["up", "down", "up_delete", "down_delete", "remove_top_bottom"] + } + } + }, + "status_range": { + "control": { + "type": "Enum", + "value": { + "range": ["open", "stop", "close", "continue"] + } + }, + "percent_control": { + "type": "Integer", + "value": { + "unit": "%", + "min": 0, + "max": 100, + "scale": 0, + "step": 1 + } + }, + "percent_state": { + "type": "Integer", + "value": { + "unit": "%", + "min": 0, + "max": 100, + "scale": 0, + "step": 1 + } + }, + "control_back_mode": { + "type": "Enum", + "value": { + "range": ["forward", "back"] + } + }, + "work_state": { + "type": "Enum", + "value": { + "range": ["opening", "closing"] + } + }, + "countdown_left": { + "type": "Integer", + "value": { + "unit": "s", + "min": 0, + "max": 86400, + "scale": 0, + "step": 1 + } + }, + "time_total": { + "type": "Integer", + "value": { + "unit": "ms", + "min": 0, + "max": 120000, + "scale": 0, + "step": 1 + } + }, + "situation_set": { + "type": "Enum", + "value": { + "range": ["fully_open", "fully_close"] + } + }, + "fault": { + "type": "Bitmap", + "value": { + "label": ["motor_fault"] + } + }, + "border": { + "type": "Enum", + "value": { + "range": ["up", "down", "up_delete", "down_delete", "remove_top_bottom"] + } + } + }, + "status": { + "control": "close", + "percent_control": 100, + "percent_state": 0, + "control_back_mode": "forward", + "work_state": "opening", + "countdown_left": 0, + "time_total": 0, + "situation_set": "fully_open", + "fault": 0, + "border": "down" + }, + "set_up": true, + "support_local": true +} diff --git a/tests/components/tuya/snapshots/test_cover.ambr b/tests/components/tuya/snapshots/test_cover.ambr index 582ef64ff3f664..e41c7aa1c29cbd 100644 --- a/tests/components/tuya/snapshots/test_cover.ambr +++ b/tests/components/tuya/snapshots/test_cover.ambr @@ -456,6 +456,57 @@ 'state': 'open', }) # --- +# name: test_platform_setup_and_discovery[cover.projector_screen_curtain-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.projector_screen_curtain', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Curtain', + 'platform': 'tuya', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': , + 'translation_key': 'curtain', + 'unique_id': 'tuya.aiag5pku0x39rkfllccontrol', + 'unit_of_measurement': None, + }) +# --- +# name: test_platform_setup_and_discovery[cover.projector_screen_curtain-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_position': 100, + 'device_class': 'curtain', + 
'friendly_name': 'Projector Screen Curtain', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.projector_screen_curtain', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- # name: test_platform_setup_and_discovery[cover.roller_shutter_living_room_curtain-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tuya/snapshots/test_init.ambr b/tests/components/tuya/snapshots/test_init.ambr index 399cc99e6b8416..3a586bf8011869 100644 --- a/tests/components/tuya/snapshots/test_init.ambr +++ b/tests/components/tuya/snapshots/test_init.ambr @@ -1766,6 +1766,37 @@ 'via_device_id': None, }) # --- +# name: test_device_registry[aiag5pku0x39rkfllc] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'tuya', + 'aiag5pku0x39rkfllc', + ), + }), + 'labels': set({ + }), + 'manufacturer': 'Tuya', + 'model': 'VIVIDSTORM SCREEN', + 'model_id': 'lfkr93x0ukp5gaia', + 'name': 'Projector Screen', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- # name: test_device_registry[aje5kxgmhhxdihqizc] DeviceRegistryEntrySnapshot({ 'area_id': None, diff --git a/tests/components/tuya/snapshots/test_select.ambr b/tests/components/tuya/snapshots/test_select.ambr index 31862ae9d6cf9e..77b0c55340c134 100644 --- a/tests/components/tuya/snapshots/test_select.ambr +++ b/tests/components/tuya/snapshots/test_select.ambr @@ -3608,6 +3608,63 @@ 'state': 'back', }) # --- +# name: test_platform_setup_and_discovery[select.projector_screen_motor_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'forward', + 'back', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.projector_screen_motor_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Motor mode', + 'platform': 'tuya', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'curtain_motor_mode', + 'unique_id': 'tuya.aiag5pku0x39rkfllccontrol_back_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_platform_setup_and_discovery[select.projector_screen_motor_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Projector Screen Motor mode', + 'options': list([ + 'forward', + 'back', + ]), + }), + 'context': , + 'entity_id': 'select.projector_screen_motor_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'forward', + }) +# --- # name: test_platform_setup_and_discovery[select.raspy4_home_assistant_indicator_light_mode-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tuya/snapshots/test_sensor.ambr b/tests/components/tuya/snapshots/test_sensor.ambr index f2769f8324025f..53caaf34216d77 100644 --- a/tests/components/tuya/snapshots/test_sensor.ambr +++ b/tests/components/tuya/snapshots/test_sensor.ambr @@ -14164,6 +14164,55 @@ 'state': '0.0', }) # --- +# 
name: test_platform_setup_and_discovery[sensor.projector_screen_last_operation_duration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.projector_screen_last_operation_duration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Last operation duration', + 'platform': 'tuya', + 'previous_unique_id': None, + 'suggested_object_id': None, + 'supported_features': 0, + 'translation_key': 'last_operation_duration', + 'unique_id': 'tuya.aiag5pku0x39rkfllctime_total', + 'unit_of_measurement': 'ms', + }) +# --- +# name: test_platform_setup_and_discovery[sensor.projector_screen_last_operation_duration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Projector Screen Last operation duration', + 'unit_of_measurement': 'ms', + }), + 'context': , + 'entity_id': 'sensor.projector_screen_last_operation_duration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- # name: test_platform_setup_and_discovery[sensor.pth_9cw_32_carbon_dioxide-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/zha/test_config_flow.py b/tests/components/zha/test_config_flow.py index 581d49f7eec928..ce1b1f92f37962 100644 --- a/tests/components/zha/test_config_flow.py +++ b/tests/components/zha/test_config_flow.py @@ -2035,6 +2035,14 @@ async def test_options_flow_creates_backup( @pytest.mark.parametrize( "async_unload_effect", [True, config_entries.OperationNotAllowed()] ) +@pytest.mark.parametrize( + ("input_flow_control", "conf_flow_control"), + [ + ("hardware", "hardware"), + ("software", "software"), + ("none", None), + ], +) @patch( "serial.tools.list_ports.comports", MagicMock( @@ -2047,7 +2055,11 @@ async def test_options_flow_creates_backup( ) @patch("homeassistant.components.zha.async_setup_entry", return_value=True) async def test_options_flow_defaults( - async_setup_entry, async_unload_effect, hass: HomeAssistant + async_setup_entry, + async_unload_effect, + input_flow_control, + conf_flow_control, + hass: HomeAssistant, ) -> None: """Test options flow defaults match radio defaults.""" @@ -2127,7 +2139,9 @@ async def test_options_flow_defaults( "flow_control": "none", } - with patch(f"zigpy_znp.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True)): + with patch( + f"zigpy_znp.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True) + ) as mock_probe: # Change the serial port path result5 = await hass.config_entries.options.async_configure( flow["flow_id"], @@ -2135,9 +2149,19 @@ async def test_options_flow_defaults( # Change everything CONF_DEVICE_PATH: "/dev/new_serial_port", CONF_BAUDRATE: 54321, - CONF_FLOW_CONTROL: "software", + CONF_FLOW_CONTROL: input_flow_control, }, ) + # verify we passed the correct flow control to the probe function + assert mock_probe.mock_calls == [ + call( + { + "path": "/dev/new_serial_port", + "baudrate": 54321, + "flow_control": conf_flow_control, + } + ) + ] # The radio has been detected, we can move on to creating the config entry assert result5["step_id"] == "choose_migration_strategy" @@ -2164,7 +2188,7 @@ async def test_options_flow_defaults( CONF_DEVICE: { CONF_DEVICE_PATH: 
"/dev/new_serial_port", CONF_BAUDRATE: 54321, - CONF_FLOW_CONTROL: "software", + CONF_FLOW_CONTROL: conf_flow_control, }, CONF_RADIO_TYPE: "znp", } diff --git a/tests/test_bootstrap.py b/tests/test_bootstrap.py index 9e1f246b551495..604b375d299216 100644 --- a/tests/test_bootstrap.py +++ b/tests/test_bootstrap.py @@ -38,6 +38,17 @@ VERSION_PATH = os.path.join(get_test_config_dir(), config_util.VERSION_FILE) +CONFIG_LOG_FILE = get_test_config_dir("home-assistant.log") +ARG_LOG_FILE = "test.log" + + +def cleanup_log_files() -> None: + """Remove all log files.""" + for f in glob.glob(f"{CONFIG_LOG_FILE}*"): + os.remove(f) + for f in glob.glob(f"{ARG_LOG_FILE}*"): + os.remove(f) + @pytest.fixture(autouse=True) def disable_installed_check() -> Generator[None]: @@ -85,16 +96,11 @@ async def test_async_enable_logging( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test to ensure logging is migrated to the queue handlers.""" - config_log_file_pattern = get_test_config_dir("home-assistant.log*") - arg_log_file_pattern = "test.log*" # Ensure we start with a clean slate - for f in glob.glob(arg_log_file_pattern): - os.remove(f) - for f in glob.glob(config_log_file_pattern): - os.remove(f) - assert len(glob.glob(config_log_file_pattern)) == 0 - assert len(glob.glob(arg_log_file_pattern)) == 0 + cleanup_log_files() + assert len(glob.glob(CONFIG_LOG_FILE)) == 0 + assert len(glob.glob(ARG_LOG_FILE)) == 0 with ( patch("logging.getLogger"), @@ -108,7 +114,7 @@ async def test_async_enable_logging( ): await bootstrap.async_enable_logging(hass) mock_async_activate_log_queue_handler.assert_called_once() - assert len(glob.glob(config_log_file_pattern)) > 0 + assert len(glob.glob(CONFIG_LOG_FILE)) > 0 mock_async_activate_log_queue_handler.reset_mock() await bootstrap.async_enable_logging( @@ -117,14 +123,61 @@ async def test_async_enable_logging( log_file="test.log", ) mock_async_activate_log_queue_handler.assert_called_once() - assert len(glob.glob(arg_log_file_pattern)) > 0 + assert len(glob.glob(ARG_LOG_FILE)) > 0 assert "Error rolling over log file" in caplog.text - for f in glob.glob(arg_log_file_pattern): - os.remove(f) - for f in glob.glob(config_log_file_pattern): - os.remove(f) + cleanup_log_files() + + +async def test_async_enable_logging_supervisor( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test to ensure the default log file is not created on Supervisor installations.""" + + # Ensure we start with a clean slate + cleanup_log_files() + assert len(glob.glob(CONFIG_LOG_FILE)) == 0 + assert len(glob.glob(ARG_LOG_FILE)) == 0 + + with ( + patch.dict(os.environ, {"SUPERVISOR": "1"}), + patch( + "homeassistant.bootstrap.async_activate_log_queue_handler" + ) as mock_async_activate_log_queue_handler, + patch("logging.getLogger"), + ): + await bootstrap.async_enable_logging(hass) + assert len(glob.glob(CONFIG_LOG_FILE)) == 0 + mock_async_activate_log_queue_handler.assert_called_once() + mock_async_activate_log_queue_handler.reset_mock() + + # Check that if the log file exists, it is renamed + def write_log_file(): + with open( + get_test_config_dir("home-assistant.log"), "w", encoding="utf8" + ) as f: + f.write("test") + + await hass.async_add_executor_job(write_log_file) + assert len(glob.glob(CONFIG_LOG_FILE)) == 1 + assert len(glob.glob(f"{CONFIG_LOG_FILE}.old")) == 0 + await bootstrap.async_enable_logging(hass) + assert len(glob.glob(CONFIG_LOG_FILE)) == 0 + assert len(glob.glob(f"{CONFIG_LOG_FILE}.old")) == 1 + 
mock_async_activate_log_queue_handler.assert_called_once() + mock_async_activate_log_queue_handler.reset_mock() + + await bootstrap.async_enable_logging( + hass, + log_rotate_days=5, + log_file="test.log", + ) + mock_async_activate_log_queue_handler.assert_called_once() + # Even on Supervisor, the log file should be created if it is explicitly specified + assert len(glob.glob(ARG_LOG_FILE)) > 0 + + cleanup_log_files() async def test_load_hassio(hass: HomeAssistant) -> None: