response
stringlengths
1
33.1k
instruction
stringlengths
22
582k
Fixture to allow overriding MQTT config.
def mqtt_config_entry_data() -> dict[str, Any] | None:
    """Fixture to allow overriding MQTT config."""
    # Default is no override; tests parametrize this fixture to supply data.
    return None
Fixture to mock MQTT client.
def mqtt_client_mock(hass: HomeAssistant) -> Generator[MqttMockPahoClient, None, None]:
    """Fixture to mock MQTT client."""

    # Monotonically increasing message id shared by all fake client calls.
    mid: int = 0

    def get_mid() -> int:
        nonlocal mid
        mid += 1
        return mid

    class FakeInfo:
        """Class to fake MQTT info."""

        def __init__(self, mid: int) -> None:
            self.mid = mid
            self.rc = 0

    with patch("paho.mqtt.client.Client") as mock_client:
        # The below use a call_soon for the on_publish/on_subscribe/on_unsubscribe
        # callbacks to simulate the behavior of the real MQTT client which will
        # not be synchronous.

        @ha.callback
        def _async_fire_mqtt_message(topic, payload, qos, retain):
            async_fire_mqtt_message(hass, topic, payload, qos, retain)
            mid = get_mid()
            hass.loop.call_soon(mock_client.on_publish, 0, 0, mid)
            return FakeInfo(mid)

        def _subscribe(topic, qos=0):
            mid = get_mid()
            hass.loop.call_soon(mock_client.on_subscribe, 0, 0, mid)
            return (0, mid)

        def _unsubscribe(topic):
            mid = get_mid()
            hass.loop.call_soon(mock_client.on_unsubscribe, 0, 0, mid)
            return (0, mid)

        def _connect(*args, **kwargs):
            # Connect always calls reconnect once, but we
            # mock it out so we call reconnect to simulate
            # the behavior.
            mock_client.reconnect()
            hass.loop.call_soon_threadsafe(
                mock_client.on_connect, mock_client, None, 0, 0, 0
            )
            mock_client.on_socket_open(
                mock_client, None, Mock(fileno=Mock(return_value=-1))
            )
            mock_client.on_socket_register_write(
                mock_client, None, Mock(fileno=Mock(return_value=-1))
            )
            return 0

        # Rebind to the instance mock returned by the patched class.
        mock_client = mock_client.return_value
        mock_client.connect.side_effect = _connect
        mock_client.subscribe.side_effect = _subscribe
        mock_client.unsubscribe.side_effect = _unsubscribe
        mock_client.publish.side_effect = _async_fire_mqtt_message
        yield mock_client
Fixture to parametrize the content of main configuration using mock_hass_config. To set a configuration, tests can be marked with: @pytest.mark.parametrize("hass_config", [{integration: {...}}]) Add the `mock_hass_config: None` fixture to the test.
def hass_config() -> ConfigType:
    """Fixture to parametrize the content of main configuration using mock_hass_config.

    To set a configuration, tests can be marked with:
    @pytest.mark.parametrize("hass_config", [{integration: {...}}])
    Add the `mock_hass_config: None` fixture to the test.
    """
    # Default: empty configuration.
    return {}
Fixture to mock the content of main configuration. Patches homeassistant.config.load_yaml_config_file and hass.config_entries with `hass_config` as parameterized.
def mock_hass_config(
    hass: HomeAssistant, hass_config: ConfigType
) -> Generator[None, None, None]:
    """Fixture to mock the content of main configuration.

    Patches homeassistant.config.load_yaml_config_file and hass.config_entries
    with `hass_config` as parameterized.
    """
    # Only replace config_entries when a non-empty config was parametrized.
    if hass_config:
        hass.config_entries = ConfigEntries(hass, hass_config)
    with patch("homeassistant.config.load_yaml_config_file", return_value=hass_config):
        yield
Fixture to parametrize the content of configuration.yaml file. To set yaml content, tests can be marked with: @pytest.mark.parametrize("hass_config_yaml", ["..."]) Add the `mock_hass_config_yaml: None` fixture to the test.
def hass_config_yaml() -> str:
    """Fixture to parametrize the content of configuration.yaml file.

    To set yaml content, tests can be marked with:
    @pytest.mark.parametrize("hass_config_yaml", ["..."])
    Add the `mock_hass_config_yaml: None` fixture to the test.
    """
    # Default: empty yaml document.
    return ""
Fixture to parametrize multiple yaml configuration files. To set the YAML files to patch, tests can be marked with: @pytest.mark.parametrize( "hass_config_yaml_files", [{"configuration.yaml": "..."}] ) Add the `mock_hass_config_yaml: None` fixture to the test.
def hass_config_yaml_files(hass_config_yaml: str) -> dict[str, str]:
    """Fixture to parametrize multiple yaml configuration files.

    To set the YAML files to patch, tests can be marked with:
    @pytest.mark.parametrize(
        "hass_config_yaml_files", [{"configuration.yaml": "..."}]
    )
    Add the `mock_hass_config_yaml: None` fixture to the test.
    """
    # Default: a single configuration.yaml holding hass_config_yaml.
    return {YAML_CONFIG_FILE: hass_config_yaml}
Fixture to mock the content of the yaml configuration files. Patches yaml configuration files using the `hass_config_yaml` and `hass_config_yaml_files` fixtures.
def mock_hass_config_yaml(
    hass: HomeAssistant, hass_config_yaml_files: dict[str, str]
) -> Generator[None, None, None]:
    """Fixture to mock the content of the yaml configuration files.

    Patches yaml configuration files using the `hass_config_yaml`
    and `hass_config_yaml_files` fixtures.
    """
    with patch_yaml_files(hass_config_yaml_files):
        yield
Mock network.
def mock_network() -> Generator[None, None, None]:
    """Mock network adapters with a single IPv4 eth0 interface."""
    with patch(
        "homeassistant.components.network.util.ifaddr.get_adapters",
        return_value=[
            Mock(
                nice_name="eth0",
                ips=[Mock(is_IPv6=False, ip="10.10.10.10", network_prefix=24)],
                index=0,
            )
        ],
    ):
        yield
Mock network util's async_get_source_ip.
def mock_get_source_ip() -> Generator[None, None, None]:
    """Mock network util's async_get_source_ip."""
    with patch(
        "homeassistant.components.network.util.async_get_source_ip",
        return_value="10.10.10.10",
    ):
        yield
Mock zeroconf.
def mock_zeroconf() -> Generator[None, None, None]:
    """Mock zeroconf."""
    from zeroconf import DNSCache  # pylint: disable=import-outside-toplevel

    with (
        patch("homeassistant.components.zeroconf.HaZeroconf", autospec=True) as mock_zc,
        patch("homeassistant.components.zeroconf.AsyncServiceBrowser", autospec=True),
    ):
        zc = mock_zc.return_value
        # DNSCache has strong Cython type checks, and MagicMock does not work
        # so we must mock the class directly
        zc.cache = DNSCache()
        yield mock_zc
Mock AsyncZeroconf.
def mock_async_zeroconf(mock_zeroconf: None) -> Generator[None, None, None]:
    """Mock AsyncZeroconf."""
    from zeroconf import DNSCache, Zeroconf  # pylint: disable=import-outside-toplevel
    from zeroconf.asyncio import (  # pylint: disable=import-outside-toplevel
        AsyncZeroconf,
    )

    with patch(
        "homeassistant.components.zeroconf.HaAsyncZeroconf", spec=AsyncZeroconf
    ) as mock_aiozc:
        zc = mock_aiozc.return_value
        zc.async_unregister_service = AsyncMock()
        zc.async_register_service = AsyncMock()
        zc.async_update_service = AsyncMock()
        zc.zeroconf = Mock(spec=Zeroconf)
        zc.zeroconf.async_wait_for_start = AsyncMock()
        # DNSCache has strong Cython type checks, and MagicMock does not work
        # so we must mock the class directly
        zc.zeroconf.cache = DNSCache()
        zc.zeroconf.done = False
        zc.async_close = AsyncMock()
        zc.ha_async_close = AsyncMock()
        yield zc
Enable custom integrations defined in the test dir.
def enable_custom_integrations(hass: HomeAssistant) -> None:
    """Enable custom integrations defined in the test dir."""
    # Dropping the cached value makes the loader rescan custom components.
    hass.data.pop(loader.DATA_CUSTOM_COMPONENTS)
Fixture to control enabling of recorder's statistics compilation. To enable statistics, tests can be marked with: @pytest.mark.parametrize("enable_statistics", [True])
def enable_statistics() -> bool:
    """Fixture to control enabling of recorder's statistics compilation.

    To enable statistics, tests can be marked with:
    @pytest.mark.parametrize("enable_statistics", [True])
    """
    # Disabled by default.
    return False
Fixture to control enabling of recorder's statistics table validation. To enable statistics table validation, tests can be marked with: @pytest.mark.parametrize("enable_schema_validation", [True])
def enable_schema_validation() -> bool:
    """Fixture to control enabling of recorder's statistics table validation.

    To enable statistics table validation, tests can be marked with:
    @pytest.mark.parametrize("enable_schema_validation", [True])
    """
    # Disabled by default.
    return False
Fixture to control enabling of recorder's nightly purge job. To enable nightly purging, tests can be marked with: @pytest.mark.parametrize("enable_nightly_purge", [True])
def enable_nightly_purge() -> bool:
    """Fixture to control enabling of recorder's nightly purge job.

    To enable nightly purging, tests can be marked with:
    @pytest.mark.parametrize("enable_nightly_purge", [True])
    """
    # Disabled by default.
    return False
Fixture to control enabling of recorder's context id migration. To enable context id migration, tests can be marked with: @pytest.mark.parametrize("enable_migrate_context_ids", [True])
def enable_migrate_context_ids() -> bool:
    """Fixture to control enabling of recorder's context id migration.

    To enable context id migration, tests can be marked with:
    @pytest.mark.parametrize("enable_migrate_context_ids", [True])
    """
    # Disabled by default.
    return False
Fixture to control enabling of recorder's event type id migration. To enable context id migration, tests can be marked with: @pytest.mark.parametrize("enable_migrate_event_type_ids", [True])
def enable_migrate_event_type_ids() -> bool:
    """Fixture to control enabling of recorder's event type id migration.

    To enable event type id migration, tests can be marked with:
    @pytest.mark.parametrize("enable_migrate_event_type_ids", [True])
    """
    # Disabled by default.
    return False
Fixture to control enabling of recorder's entity_id migration. To enable context id migration, tests can be marked with: @pytest.mark.parametrize("enable_migrate_entity_ids", [True])
def enable_migrate_entity_ids() -> bool:
    """Fixture to control enabling of recorder's entity_id migration.

    To enable entity_id migration, tests can be marked with:
    @pytest.mark.parametrize("enable_migrate_entity_ids", [True])
    """
    # Disabled by default.
    return False
Fixture to override recorder config. To override the config, tests can be marked with: @pytest.mark.parametrize("recorder_config", [{...}])
def recorder_config() -> dict[str, Any] | None:
    """Fixture to override recorder config.

    To override the config, tests can be marked with:
    @pytest.mark.parametrize("recorder_config", [{...}])
    """
    # None means "use the default recorder configuration".
    return None
Prepare a default database for tests and return a connection URL.
def recorder_db_url(
    pytestconfig: pytest.Config,
    hass_fixture_setup: list[bool],
) -> Generator[str, None, None]:
    """Prepare a default database for tests and return a connection URL."""
    # The database must be created before any hass instance is set up.
    assert not hass_fixture_setup

    db_url = cast(str, pytestconfig.getoption("dburl"))
    if db_url.startswith("mysql://"):
        # pylint: disable-next=import-outside-toplevel
        import sqlalchemy_utils

        # The encoding string deliberately smuggles a COLLATE clause into the
        # CREATE DATABASE statement issued by sqlalchemy_utils.
        charset = "utf8mb4' COLLATE = 'utf8mb4_unicode_ci"
        assert not sqlalchemy_utils.database_exists(db_url)
        sqlalchemy_utils.create_database(db_url, encoding=charset)
    elif db_url.startswith("postgresql://"):
        # pylint: disable-next=import-outside-toplevel
        import sqlalchemy_utils

        assert not sqlalchemy_utils.database_exists(db_url)
        sqlalchemy_utils.create_database(db_url, encoding="utf8")
    yield db_url
    if db_url.startswith("mysql://"):
        # pylint: disable-next=import-outside-toplevel
        import sqlalchemy as sa

        made_url = sa.make_url(db_url)
        db = made_url.database
        engine = sa.create_engine(db_url)
        # Check for any open connections to the database before dropping it
        # to ensure that InnoDB does not deadlock.
        with engine.begin() as connection:
            query = sa.text(
                "select id FROM information_schema.processlist WHERE db=:db and id != CONNECTION_ID()"
            )
            rows = connection.execute(query, parameters={"db": db}).fetchall()
            if rows:
                raise RuntimeError(
                    f"Unable to drop database {db} because it is in use by {rows}"
                )
        engine.dispose()
        sqlalchemy_utils.drop_database(db_url)
    elif db_url.startswith("postgresql://"):
        sqlalchemy_utils.drop_database(db_url)
Home Assistant fixture with in-memory recorder.
def hass_recorder(
    recorder_db_url: str,
    enable_nightly_purge: bool,
    enable_statistics: bool,
    enable_schema_validation: bool,
    enable_migrate_context_ids: bool,
    enable_migrate_event_type_ids: bool,
    enable_migrate_entity_ids: bool,
    hass_storage,
) -> Generator[Callable[..., HomeAssistant], None, None]:
    """Home Assistant fixture with in-memory recorder."""
    # pylint: disable-next=import-outside-toplevel
    from homeassistant.components import recorder

    # pylint: disable-next=import-outside-toplevel
    from homeassistant.components.recorder import migration

    with get_test_home_assistant() as hass:
        # Each recorder feature is patched to either its real implementation
        # (when the corresponding enable_* fixture is True) or a no-op.
        nightly = (
            recorder.Recorder.async_nightly_tasks if enable_nightly_purge else None
        )
        stats = (
            recorder.Recorder.async_periodic_statistics if enable_statistics else None
        )
        compile_missing = (
            recorder.Recorder._schedule_compile_missing_statistics
            if enable_statistics
            else None
        )
        schema_validate = (
            migration._find_schema_errors
            if enable_schema_validation
            else itertools.repeat(set())
        )
        migrate_states_context_ids = (
            recorder.Recorder._migrate_states_context_ids
            if enable_migrate_context_ids
            else None
        )
        migrate_events_context_ids = (
            recorder.Recorder._migrate_events_context_ids
            if enable_migrate_context_ids
            else None
        )
        migrate_event_type_ids = (
            recorder.Recorder._migrate_event_type_ids
            if enable_migrate_event_type_ids
            else None
        )
        migrate_entity_ids = (
            recorder.Recorder._migrate_entity_ids
            if enable_migrate_entity_ids
            else None
        )
        with (
            patch(
                "homeassistant.components.recorder.Recorder.async_nightly_tasks",
                side_effect=nightly,
                autospec=True,
            ),
            patch(
                "homeassistant.components.recorder.Recorder.async_periodic_statistics",
                side_effect=stats,
                autospec=True,
            ),
            patch(
                "homeassistant.components.recorder.migration._find_schema_errors",
                side_effect=schema_validate,
                autospec=True,
            ),
            patch(
                "homeassistant.components.recorder.Recorder._migrate_events_context_ids",
                side_effect=migrate_events_context_ids,
                autospec=True,
            ),
            patch(
                "homeassistant.components.recorder.Recorder._migrate_states_context_ids",
                side_effect=migrate_states_context_ids,
                autospec=True,
            ),
            patch(
                "homeassistant.components.recorder.Recorder._migrate_event_type_ids",
                side_effect=migrate_event_type_ids,
                autospec=True,
            ),
            patch(
                "homeassistant.components.recorder.Recorder._migrate_entity_ids",
                side_effect=migrate_entity_ids,
                autospec=True,
            ),
            patch(
                "homeassistant.components.recorder.Recorder._schedule_compile_missing_statistics",
                side_effect=compile_missing,
                autospec=True,
            ),
        ):

            def setup_recorder(
                *, config: dict[str, Any] | None = None, timezone: str | None = None
            ) -> HomeAssistant:
                """Set up with params."""
                if timezone is not None:
                    hass.config.set_time_zone(timezone)
                init_recorder_component(hass, config, recorder_db_url)
                hass.start()
                hass.block_till_done()
                hass.data[recorder.DATA_INSTANCE].block_till_done()
                return hass

            yield setup_recorder
            hass.stop()
Fixture to mock bluetooth adapters.
def mock_bluetooth_adapters() -> Generator[None, None, None]:
    """Fixture to mock bluetooth adapters with a single Linux hci0 adapter."""
    with (
        patch("bluetooth_auto_recovery.recover_adapter"),
        patch("bluetooth_adapters.systems.platform.system", return_value="Linux"),
        patch("bluetooth_adapters.systems.linux.LinuxAdapters.refresh"),
        patch(
            "bluetooth_adapters.systems.linux.LinuxAdapters.adapters",
            {
                "hci0": {
                    "address": "00:00:00:00:00:01",
                    "hw_version": "usb:v1D6Bp0246d053F",
                    "passive_scan": False,
                    "sw_version": "homeassistant",
                    "manufacturer": "ACME",
                    "product": "Bluetooth Adapter 5.0",
                    "product_id": "aa01",
                    "vendor_id": "cc01",
                },
            },
        ),
    ):
        yield
Fixture to mock starting the bleak scanner.
def mock_bleak_scanner_start() -> Generator[MagicMock, None, None]: """Fixture to mock starting the bleak scanner.""" # Late imports to avoid loading bleak unless we need it # pylint: disable-next=import-outside-toplevel from habluetooth import scanner as bluetooth_scanner # We need to drop the stop method from the object since we patched # out start and this fixture will expire before the stop method is called # when EVENT_HOMEASSISTANT_STOP is fired. bluetooth_scanner.OriginalBleakScanner.stop = AsyncMock() # type: ignore[assignment] with ( patch.object( bluetooth_scanner.OriginalBleakScanner, "start", ) as mock_bleak_scanner_start, patch.object(bluetooth_scanner, "HaScanner"), ): yield mock_bleak_scanner_start
Mock as if we're calling code from inside an integration.
def mock_integration_frame() -> Generator[Mock, None, None]:
    """Mock as if we're calling code from inside an integration."""
    # The frame the helpers should identify as the integration frame.
    correct_frame = Mock(
        filename="/home/paulus/homeassistant/components/hue/light.py",
        lineno="23",
        line="self.light.is_on",
    )
    with (
        patch(
            "homeassistant.helpers.frame.linecache.getline",
            return_value=correct_frame.line,
        ),
        patch(
            "homeassistant.helpers.frame.get_current_frame",
            return_value=extract_stack_to_frame(
                [
                    Mock(
                        filename="/home/paulus/homeassistant/core.py",
                        lineno="23",
                        line="do_something()",
                    ),
                    correct_frame,
                    Mock(
                        filename="/home/paulus/aiohue/lights.py",
                        lineno="2",
                        line="something()",
                    ),
                ]
            ),
        ),
    ):
        yield correct_frame
Mock out bluetooth from starting.
def mock_bluetooth(
    mock_bleak_scanner_start: MagicMock, mock_bluetooth_adapters: None
) -> None:
    """Mock out bluetooth from starting."""
    # No body needed: requesting the two fixtures performs all the patching.
Return the category registry from the current hass instance.
def category_registry(hass: HomeAssistant) -> cr.CategoryRegistry:
    """Return the category registry from the current hass instance."""
    # Registries are singletons keyed to the hass instance.
    return cr.async_get(hass)
Return the area registry from the current hass instance.
def area_registry(hass: HomeAssistant) -> ar.AreaRegistry:
    """Return the area registry from the current hass instance."""
    # Registries are singletons keyed to the hass instance.
    return ar.async_get(hass)
Return the device registry from the current hass instance.
def device_registry(hass: HomeAssistant) -> dr.DeviceRegistry:
    """Return the device registry from the current hass instance."""
    # Registries are singletons keyed to the hass instance.
    return dr.async_get(hass)
Return the entity registry from the current hass instance.
def entity_registry(hass: HomeAssistant) -> er.EntityRegistry:
    """Return the entity registry from the current hass instance."""
    # Registries are singletons keyed to the hass instance.
    return er.async_get(hass)
Return the floor registry from the current hass instance.
def floor_registry(hass: HomeAssistant) -> fr.FloorRegistry:
    """Return the floor registry from the current hass instance."""
    # Registries are singletons keyed to the hass instance.
    return fr.async_get(hass)
Return the issue registry from the current hass instance.
def issue_registry(hass: HomeAssistant) -> ir.IssueRegistry:
    """Return the issue registry from the current hass instance."""
    # Registries are singletons keyed to the hass instance.
    return ir.async_get(hass)
Return the label registry from the current hass instance.
def label_registry(hass: HomeAssistant) -> lr.LabelRegistry:
    """Return the label registry from the current hass instance."""
    # Registries are singletons keyed to the hass instance.
    return lr.async_get(hass)
Return snapshot assertion fixture with the Home Assistant extension.
def snapshot(snapshot: SnapshotAssertion) -> SnapshotAssertion:
    """Return snapshot assertion fixture with the Home Assistant extension."""
    # Wraps syrupy's snapshot fixture with the HA-specific serializer.
    return snapshot.use_extension(HomeAssistantSnapshotExtension)
Make utcnow patchable by freezegun.
def _utcnow() -> datetime.datetime: """Make utcnow patchable by freezegun.""" return datetime.datetime.now(datetime.UTC)
Make monotonic patchable by freezegun.
def _monotonic() -> float:
    """Make monotonic patchable by freezegun."""
    # Single indirection point that freezegun can patch in tests.
    current = time.monotonic()
    return current
Test deprecated aliases.
def test_deprecated_aliases(
    caplog: pytest.LogCaptureFixture,
    module: ModuleType,
    replacement: Any,
    breaks_in_ha_version: str,
) -> None:
    """Test deprecated aliases."""
    # The alias shares the replacement's name.
    alias_name = replacement.__name__
    import_and_test_deprecated_alias(
        caplog,
        module,
        alias_name,
        replacement,
        breaks_in_ha_version,
    )
Apply the storage mock.
def apply_mock_storage(hass_storage: dict[str, Any]) -> None:
    """Apply the storage mock."""
    # Requesting hass_storage is sufficient; no further setup required.
Mock HTTP start and stop.
def mock_http_start_stop() -> Generator[None, None, None]:
    """Mock HTTP start and stop."""
    with (
        patch("homeassistant.components.http.start_http_server_and_save_config"),
        patch("homeassistant.components.http.HomeAssistantHTTP.stop"),
    ):
        yield
Mock is_virtual_env.
def mock_is_virtual_env() -> Generator[Mock, None, None]:
    """Mock is_virtual_env."""
    with patch(
        "homeassistant.bootstrap.is_virtual_env", return_value=False
    ) as is_virtual_env:
        yield is_virtual_env
Mock enable logging.
def mock_enable_logging() -> Generator[Mock, None, None]:
    """Mock enable logging."""
    with patch("homeassistant.bootstrap.async_enable_logging") as enable_logging:
        yield enable_logging
Mock enable logging.
def mock_mount_local_lib_path() -> Generator[AsyncMock, None, None]:
    """Mock mounting the local library path."""
    with patch(
        "homeassistant.bootstrap.async_mount_local_lib_path"
    ) as mount_local_lib_path:
        yield mount_local_lib_path
Mock enable logging.
def mock_process_ha_config_upgrade() -> Generator[Mock, None, None]:
    """Mock the config upgrade step."""
    with patch(
        "homeassistant.config.process_ha_config_upgrade"
    ) as process_ha_config_upgrade:
        yield process_ha_config_upgrade
Mock enable logging.
def mock_ensure_config_exists() -> Generator[AsyncMock, None, None]:
    """Mock ensuring the configuration exists."""
    with patch(
        "homeassistant.config.async_ensure_config_exists", return_value=True
    ) as ensure_config_exists:
        yield ensure_config_exists
Test that shouldRollover always returns False.
def test_should_rollover_is_always_false():
    """Test that shouldRollover always returns False."""
    # delay=True avoids opening the log file during construction.
    assert (
        bootstrap._RotatingFileHandlerWithoutShouldRollOver(
            "any.log", delay=True
        ).shouldRollover(Mock())
        is False
    )
Create an empty file.
def create_file(path):
    """Create an empty file, truncating any existing content at *path*.

    Args:
        path: Filesystem path of the file to create.
    """
    # Explicit encoding makes the text-mode open portable (ruff/pylint W1514);
    # the file is immediately closed, leaving it empty.
    with open(path, "w", encoding="utf-8"):
        pass
Clean up.
def teardown():
    """Clean up."""
    yield
    # Remove any files the tests may have left behind.
    for leftover in (
        YAML_PATH,
        SECRET_PATH,
        VERSION_PATH,
        AUTOMATIONS_PATH,
        SCRIPTS_PATH,
        SCENES_PATH,
        SAFE_MODE_PATH,
    ):
        if os.path.isfile(leftover):
            os.remove(leftover)
Test that loading an empty file returns an empty dict.
def test_load_yaml_config_converts_empty_files_to_dict() -> None:
    """Test that loading an empty file returns an empty dict."""
    create_file(YAML_PATH)

    assert isinstance(config_util.load_yaml_config_file(YAML_PATH), dict)
Test error raised when YAML file is not a dict.
def test_load_yaml_config_raises_error_if_not_dict() -> None:
    """Test error raised when YAML file is not a dict."""
    # A bare scalar is valid YAML but not a mapping.
    with open(YAML_PATH, "w") as fp:
        fp.write("5")

    with pytest.raises(HomeAssistantError):
        config_util.load_yaml_config_file(YAML_PATH)
Test error raised if invalid YAML.
def test_load_yaml_config_raises_error_if_malformed_yaml() -> None:
    """Test error raised if invalid YAML."""
    # ":-" is not parseable YAML.
    with open(YAML_PATH, "w") as fp:
        fp.write(":-")

    with pytest.raises(HomeAssistantError):
        config_util.load_yaml_config_file(YAML_PATH)
Test error raised if unsafe YAML.
def test_load_yaml_config_raises_error_if_unsafe_yaml() -> None:
    """Test error raised if unsafe YAML."""
    # The payload would call os.system if the loader executed python tags.
    with open(YAML_PATH, "w") as fp:
        fp.write("- !!python/object/apply:os.system []")

    with (
        patch.object(os, "system") as system_mock,
        contextlib.suppress(HomeAssistantError),
    ):
        config_util.load_yaml_config_file(YAML_PATH)

    assert len(system_mock.mock_calls) == 0

    # Here we validate that the test above is a good test
    # since previously the syntax was not valid
    with open(YAML_PATH) as fp, patch.object(os, "system") as system_mock:
        list(yaml.unsafe_load_all(fp))

    assert len(system_mock.mock_calls) == 1
Test removal of library.
def test_load_yaml_config_preserves_key_order() -> None:
    """Test that loading YAML preserves the insertion order of keys."""
    with open(YAML_PATH, "w") as fp:
        fp.write("hello: 2\n")
        fp.write("world: 1\n")

    assert [("hello", 2), ("world", 1)] == list(
        config_util.load_yaml_config_file(YAML_PATH).items()
    )
Test core config schema.
def test_core_config_schema() -> None:
    """Test core config schema."""
    # Each configuration below is invalid and must be rejected by the schema.
    for value in (
        {CONF_UNIT_SYSTEM: "K"},
        {"time_zone": "non-exist"},
        {"latitude": "91"},
        {"longitude": -181},
        {"external_url": "not an url"},
        {"internal_url": "not an url"},
        # Fixed: was the set literal `{"currency", 100}` (a typo for a dict);
        # a non-dict input makes voluptuous raise plain Invalid rather than
        # MultipleInvalid, so the set form would fail the pytest.raises check.
        {"currency": 100},
        {"customize": "bla"},
        {"customize": {"light.sensor": 100}},
        {"customize": {"entity_id": []}},
        {"country": "xx"},
        {"language": "xx"},
    ):
        with pytest.raises(MultipleInvalid):
            config_util.CORE_CONFIG_SCHEMA(value)

    # A fully valid configuration must validate without raising.
    config_util.CORE_CONFIG_SCHEMA(
        {
            "name": "Test name",
            "latitude": "-23.45",
            "longitude": "123.45",
            "external_url": "https://www.example.com",
            "internal_url": "http://example.local",
            CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_METRIC,
            "currency": "USD",
            "customize": {"sensor.temperature": {"hidden": True}},
            "country": "SE",
            "language": "sv",
        }
    )
Test that we warn for internal/external URL with path.
def test_core_config_schema_internal_external_warning(
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test that we warn for internal/external URL with path."""
    config_util.CORE_CONFIG_SCHEMA(
        {
            "external_url": "https://www.example.com/bla",
            "internal_url": "http://example.local/yo",
        }
    )

    assert "Invalid external_url set" in caplog.text
    assert "Invalid internal_url set" in caplog.text
Test basic customize config validation.
def test_customize_dict_schema() -> None:
    """Test basic customize config validation."""
    values = ({ATTR_FRIENDLY_NAME: None}, {ATTR_ASSUMED_STATE: "2"})

    for val in values:
        with pytest.raises(MultipleInvalid):
            config_util.CUSTOMIZE_DICT_SCHEMA(val)

    # Values are coerced: 2 -> "2" and "0" -> False.
    assert config_util.CUSTOMIZE_DICT_SCHEMA(
        {ATTR_FRIENDLY_NAME: 2, ATTR_ASSUMED_STATE: "0"}
    ) == {ATTR_FRIENDLY_NAME: "2", ATTR_ASSUMED_STATE: False}
Test that customize_glob preserves order.
def test_customize_glob_is_ordered() -> None:
    """Test that customize_glob preserves order."""
    conf = config_util.CORE_CONFIG_SCHEMA({"customize_glob": OrderedDict()})

    assert isinstance(conf["customize_glob"], OrderedDict)
Test removal of library on upgrade from before 0.50.
def test_remove_lib_on_upgrade(
    mock_docker, mock_os, mock_shutil, hass: HomeAssistant
) -> None:
    """Test removal of library on upgrade from before 0.50."""
    ha_version = "0.49.0"
    mock_os.path.isdir = mock.Mock(return_value=True)
    mock_open = mock.mock_open()
    with patch("homeassistant.config.open", mock_open, create=True):
        opened_file = mock_open.return_value
        # Simulate the version file reporting the pre-0.50 version.
        opened_file.readline.return_value = ha_version
        hass.config.path = mock.Mock()
        config_util.process_ha_config_upgrade(hass)
        hass_path = hass.config.path.return_value

        assert mock_os.path.isdir.call_count == 1
        assert mock_os.path.isdir.call_args == mock.call(hass_path)
        assert mock_shutil.rmtree.call_count == 1
        assert mock_shutil.rmtree.call_args == mock.call(hass_path)
Test removal of library on upgrade from before 0.94 and in Docker.
def test_remove_lib_on_upgrade_94(
    mock_docker, mock_os, mock_shutil, hass: HomeAssistant
) -> None:
    """Test removal of library on upgrade from before 0.94 and in Docker."""
    ha_version = "0.93.0.dev0"
    mock_os.path.isdir = mock.Mock(return_value=True)
    mock_open = mock.mock_open()
    with patch("homeassistant.config.open", mock_open, create=True):
        opened_file = mock_open.return_value
        # Simulate the version file reporting a pre-0.94 dev version.
        opened_file.readline.return_value = ha_version
        hass.config.path = mock.Mock()
        config_util.process_ha_config_upgrade(hass)
        hass_path = hass.config.path.return_value

        assert mock_os.path.isdir.call_count == 1
        assert mock_os.path.isdir.call_args == mock.call(hass_path)
        assert mock_shutil.rmtree.call_count == 1
        assert mock_shutil.rmtree.call_args == mock.call(hass_path)
Test update of version on upgrade.
def test_process_config_upgrade(hass: HomeAssistant) -> None:
    """Test update of version on upgrade."""
    ha_version = "0.92.0"

    mock_open = mock.mock_open()
    with (
        patch("homeassistant.config.open", mock_open, create=True),
        patch.object(config_util, "__version__", "0.91.0"),
    ):
        opened_file = mock_open.return_value
        # Stored version is newer than the (patched) running version.
        opened_file.readline.return_value = ha_version

        config_util.process_ha_config_upgrade(hass)

        assert opened_file.write.call_count == 1
        assert opened_file.write.call_args == mock.call("0.91.0")
Test no update of version on no upgrade.
def test_config_upgrade_same_version(hass: HomeAssistant) -> None:
    """Test no update of version on no upgrade."""
    ha_version = __version__

    mock_open = mock.mock_open()
    with patch("homeassistant.config.open", mock_open, create=True):
        opened_file = mock_open.return_value
        # Stored version matches the running version: nothing to write.
        opened_file.readline.return_value = ha_version

        config_util.process_ha_config_upgrade(hass)

        assert opened_file.write.call_count == 0
Test update of version on upgrade, with no version file.
def test_config_upgrade_no_file(hass: HomeAssistant) -> None:
    """Test update of version on upgrade, with no version file."""
    mock_open = mock.mock_open()
    # First open (read) fails; subsequent opens (write) succeed.
    mock_open.side_effect = [FileNotFoundError(), mock.DEFAULT, mock.DEFAULT]
    with patch("homeassistant.config.open", mock_open, create=True):
        opened_file = mock_open.return_value
        config_util.process_ha_config_upgrade(hass)
        assert opened_file.write.call_count == 1
        assert opened_file.write.call_args == mock.call(__version__)
Patch _merge_log_error from packages.
def merge_log_err(hass):
    """Patch _merge_log_error from packages."""
    with patch("homeassistant.config._LOGGER.error") as logerr:
        yield logerr
Test identify config schema.
def test_identify_config_schema(domain, schema, expected) -> None:
    """Test identify config schema."""
    assert (
        config_util._identify_config_schema(Mock(DOMAIN=domain, CONFIG_SCHEMA=schema))
        == expected
    )
Test the extraction of domain configuration.
def test_extract_domain_configs() -> None:
    """Test the extraction of domain configuration."""
    config = {
        "zone": None,
        "zoner": None,
        "zone ": None,
        "zone Hallo": None,
        "zone 100": None,
    }

    # "zoner" and "zone " do not match; "zone <suffix>" entries do.
    assert {"zone", "zone Hallo", "zone 100"} == set(
        config_util.extract_domain_configs(config, "zone")
    )
Test config per platform method.
def test_config_per_platform() -> None:
    """Test config per platform method."""
    config = OrderedDict(
        [
            ("zone", {"platform": "hello"}),
            ("zoner", None),
            ("zone Hallo", [1, {"platform": "hello 2"}]),
            ("zone 100", None),
        ]
    )

    assert [
        ("hello", config["zone"]),
        (None, 1),
        ("hello 2", config["zone Hallo"][1]),
    ] == list(config_util.config_per_platform(config, "zone"))
Test extract_platform_integrations.
def test_extract_platform_integrations() -> None:
    """Test extract_platform_integrations."""
    # NOTE(review): "i n v a-@@" appears twice; in a dict literal the second
    # entry overwrites the first — presumably intentional, worth confirming.
    config = OrderedDict(
        [
            (b"zone", {"platform": "not str"}),
            ("zone", {"platform": "hello"}),
            ("switch", {"platform": ["un", "hash", "able"]}),
            ("zonex", []),
            ("zoney", ""),
            ("notzone", {"platform": "nothello"}),
            ("zoner", None),
            ("zone Hallo", [1, {"platform": "hello 2"}]),
            ("zone 100", None),
            ("i n v a-@@", None),
            ("i n v a-@@", {"platform": "hello"}),
            ("zoneq", "pig"),
            ("zoneempty", {"platform": ""}),
        ]
    )
    assert config_util.extract_platform_integrations(config, {"zone"}) == {
        "zone": {"hello", "hello 2"}
    }
    assert config_util.extract_platform_integrations(config, {"switch"}) == {}
    assert config_util.extract_platform_integrations(config, {"zonex"}) == {}
    assert config_util.extract_platform_integrations(config, {"zoney"}) == {}
    assert config_util.extract_platform_integrations(
        config, {"zone", "not_valid", "notzone"}
    ) == {"zone": {"hello 2", "hello"}, "notzone": {"nothello"}}
    assert config_util.extract_platform_integrations(config, {"zoneq"}) == {}
    assert config_util.extract_platform_integrations(config, {"zoneempty"}) == {}
Mock config flows.
def mock_handlers() -> Generator[None, None, None]:
    """Mock config flows."""

    class MockFlowHandler(config_entries.ConfigFlow):
        """Define a mock flow handler."""

        VERSION = 1

        async def async_step_reauth(self, data):
            """Mock Reauth."""
            return await self.async_step_reauth_confirm()

        async def async_step_reauth_confirm(self, user_input=None):
            """Test reauth confirm step."""
            if user_input is None:
                return self.async_show_form(step_id="reauth_confirm")
            return self.async_abort(reason="test")

        async def async_step_reconfigure(self, data):
            """Mock reconfigure; reuses the reauth confirm flow."""
            return await self.async_step_reauth_confirm()

    with patch.dict(
        config_entries.HANDLERS, {"comp": MockFlowHandler, "test": MockFlowHandler}
    ):
        yield
Test we log an error if trying to add same config entry twice.
def test_raise_trying_to_add_same_config_entry_twice(
    hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:
    """Test we log an error if trying to add same config entry twice."""
    entry = MockConfigEntry(domain="test")
    entry.add_to_hass(hass)
    # Adding the same entry again must log, not raise.
    entry.add_to_hass(hass)
    assert f"An entry with the id {entry.entry_id} already exists" in caplog.text
Test module.__all__ is correctly set.
def test_all() -> None:
    """Test module.__all__ is correctly set."""
    help_test_all(const)
Test deprecated constants.
def test_deprecated_constants(
    caplog: pytest.LogCaptureFixture,
    enum: Enum,
    constant_prefix: str,
) -> None:
    """Test deprecated constants."""
    # "2025.1" is the release in which the constants break.
    import_and_test_deprecated_constant_enum(
        caplog, const, enum, constant_prefix, "2025.1"
    )
Test deprecated constants, where the name is not the same as the enum value.
def test_deprecated_constant_name_changes(
    caplog: pytest.LogCaptureFixture,
    replacement: Enum,
    constant_name: str,
) -> None:
    """Test deprecated constants, where the name is not the same as the enum value."""
    # `replacement` and `constant_name` are injected by a
    # pytest.mark.parametrize decorator outside this view.
    import_and_test_deprecated_constant(
        caplog,
        const,
        constant_name,
        # Qualified replacement name, e.g. "MyEnum.MEMBER".
        f"{replacement.__class__.__name__}.{replacement.name}",
        replacement,
        "2025.1",
    )
Test split_entity_id.
def test_split_entity_id() -> None:
    """Test split_entity_id."""
    # Happy path: a single dot separates domain and object id.
    assert ha.split_entity_id("domain.object_id") == ("domain", "object_id")

    # Anything without a non-empty domain AND object id must raise.
    for malformed in ("", ".", "just_domain", "empty_object_id.", ".empty_domain"):
        with pytest.raises(ValueError):
            ha.split_entity_id(malformed)
Add a coro to pending tasks.
def test_add_job_pending_tasks_coro(hass: HomeAssistant) -> None:
    """Add a coro to pending tasks."""

    async def test_coro():
        """Test Coro."""

    hass.add_job(test_coro())
    hass.add_job(test_coro())

    # add_job only schedules the coroutines; nothing should have been
    # turned into a task synchronously.
    assert len(hass._tasks) == 0
Test events.
def test_event_eq() -> None:
    """Test events."""
    fired_at = dt_util.utcnow()
    payload = {"some": "attr"}
    shared_context = ha.Context()

    def _make_event() -> ha.Event:
        """Build an event with identical type, data, time and context."""
        return ha.Event(
            "some_type",
            payload,
            time_fired_timestamp=fired_at.timestamp(),
            context=shared_context,
        )

    # Two separately constructed but identical events serialize the same.
    assert _make_event().as_dict() == _make_event().as_dict()
Test time_fired and time_fired_timestamp.
def test_event_time() -> None:
    """Test time_fired and time_fired_timestamp."""
    now = dt_util.utcnow()
    timestamp = now.timestamp()
    event = ha.Event("some_type", {"some": "attr"}, time_fired_timestamp=timestamp)
    # The timestamp is stored as given and the datetime is derived from it.
    assert event.time_fired_timestamp == timestamp
    assert event.time_fired == now
Test event JSON fragments.
def test_event_json_fragment() -> None:
    """Test event JSON fragments.

    The order of property access matters here: event1 evaluates
    json_fragment BEFORE as_dict, event2 the other way around, to prove
    neither access order affects the cached results.
    """
    now = dt_util.utcnow()
    data = {"some": "attr"}
    context = ha.Context()
    event1, event2 = (
        ha.Event(
            "some_type", data, time_fired_timestamp=now.timestamp(), context=context
        )
        for _ in range(2)
    )

    # We are testing that the JSON fragments are the same when as_dict is called
    # after json_fragment or before.
    json_fragment_1 = event1.json_fragment
    as_dict_1 = event1.as_dict()
    as_dict_2 = event2.as_dict()
    json_fragment_2 = event2.json_fragment

    assert json_dumps(json_fragment_1) == json_dumps(json_fragment_2)

    # We also test that the as_dict is the same
    assert as_dict_1 == as_dict_2

    # Finally we verify that the as_dict is a ReadOnlyDict
    # as is the data and context inside regardless of
    # if the json fragment was called first or not
    assert isinstance(as_dict_1, ReadOnlyDict)
    assert isinstance(as_dict_1["data"], ReadOnlyDict)
    assert isinstance(as_dict_1["context"], ReadOnlyDict)

    assert isinstance(as_dict_2, ReadOnlyDict)
    assert isinstance(as_dict_2["data"], ReadOnlyDict)
    assert isinstance(as_dict_2["context"], ReadOnlyDict)
Test that Event repr method works.
def test_event_repr() -> None:
    """Test that Event repr method works."""
    # Local-origin event with no data: short form, [L] marker.
    local_event = ha.Event("TestEvent")
    assert str(local_event) == "<Event TestEvent[L]>"

    # Remote-origin event with data: [R] marker plus key=value payload.
    remote_event = ha.Event("TestEvent", {"beer": "nice"}, ha.EventOrigin.remote)
    assert str(remote_event) == "<Event TestEvent[R]: beer=nice>"
Test an Event as dictionary.
def test_event_as_dict() -> None:
    """Test an Event as dictionary."""
    now = dt_util.utcnow()
    payload = {"some": "attr"}
    event = ha.Event("some_type", payload, ha.EventOrigin.local, now.timestamp())

    expected = {
        "event_type": "some_type",
        "data": payload,
        "origin": "LOCAL",
        "time_fired": now.isoformat(),
        "context": {
            "id": event.context.id,
            "parent_id": None,
            "user_id": event.context.user_id,
        },
    }
    # Assert twice: the second pass exercises the cached dictionary.
    for _ in range(2):
        assert event.as_dict() == expected
Test a State as dictionary.
def test_state_as_dict() -> None:
    """Test a State as dictionary."""
    fixed_time = datetime(1984, 12, 8, 12, 0, 0)
    state = ha.State(
        "happy.happy",
        "on",
        {"pig": "dog"},
        last_changed=fixed_time,
        last_reported=fixed_time,
        last_updated=fixed_time,
    )
    expected = {
        "context": {
            "id": state.context.id,
            "parent_id": None,
            "user_id": state.context.user_id,
        },
        "entity_id": "happy.happy",
        "attributes": {"pig": "dog"},
        "last_changed": fixed_time.isoformat(),
        "last_reported": fixed_time.isoformat(),
        "last_updated": fixed_time.isoformat(),
        "state": "on",
    }

    first = state.as_dict()
    # The result and its nested mappings are read-only.
    assert isinstance(first, ReadOnlyDict)
    assert isinstance(first["attributes"], ReadOnlyDict)
    assert isinstance(first["context"], ReadOnlyDict)
    assert first == expected

    # A second call must hit the cache and return the very same object.
    second = state.as_dict()
    assert second == expected
    assert second is first
Test a State as JSON.
def test_state_as_dict_json() -> None:
    """Test a State as JSON."""
    fixed_time = datetime(1984, 12, 8, 12, 0, 0)
    state = ha.State(
        "happy.happy",
        "on",
        {"pig": "dog"},
        context=ha.Context(id="01H0D6K3RFJAYAV2093ZW30PCW"),
        last_changed=fixed_time,
        last_reported=fixed_time,
        last_updated=fixed_time,
    )
    expected = (
        b'{"entity_id":"happy.happy","state":"on","attributes":{"pig":"dog"},'
        b'"last_changed":"1984-12-08T12:00:00","last_reported":"1984-12-08T12:00:00",'
        b'"last_updated":"1984-12-08T12:00:00",'
        b'"context":{"id":"01H0D6K3RFJAYAV2093ZW30PCW","parent_id":null,"user_id":null}}'
    )

    first = state.as_dict_json
    assert first == expected
    # Second access must hit the cache and return the same bytes object.
    assert state.as_dict_json == expected
    assert state.as_dict_json is first
Test state JSON fragments.
def test_state_json_fragment() -> None:
    """Test state JSON fragments.

    The order of property access matters here: state1 evaluates
    json_fragment BEFORE as_dict, state2 the other way around, to prove
    neither access order affects the cached results.
    """
    last_time = datetime(1984, 12, 8, 12, 0, 0)
    state1, state2 = (
        ha.State(
            "happy.happy",
            "on",
            {"pig": "dog"},
            context=ha.Context(id="01H0D6K3RFJAYAV2093ZW30PCW"),
            last_changed=last_time,
            last_reported=last_time,
            last_updated=last_time,
        )
        for _ in range(2)
    )

    # We are testing that the JSON fragments are the same when as_dict is called
    # after json_fragment or before.
    json_fragment_1 = state1.json_fragment
    as_dict_1 = state1.as_dict()
    as_dict_2 = state2.as_dict()
    json_fragment_2 = state2.json_fragment

    assert json_dumps(json_fragment_1) == json_dumps(json_fragment_2)

    # We also test that the as_dict is the same
    assert as_dict_1 == as_dict_2

    # Finally we verify that the as_dict is a ReadOnlyDict
    # as is the attributes and context inside regardless of
    # if the json fragment was called first or not
    assert isinstance(as_dict_1, ReadOnlyDict)
    assert isinstance(as_dict_1["attributes"], ReadOnlyDict)
    assert isinstance(as_dict_1["context"], ReadOnlyDict)

    assert isinstance(as_dict_2, ReadOnlyDict)
    assert isinstance(as_dict_2["attributes"], ReadOnlyDict)
    assert isinstance(as_dict_2["context"], ReadOnlyDict)
Test a State as compressed state.
def test_state_as_compressed_state() -> None:
    """Test a State as compressed state."""
    fixed_time = datetime(1984, 12, 8, 12, 0, 0, tzinfo=dt_util.UTC)
    state = ha.State(
        "happy.happy",
        "on",
        {"pig": "dog"},
        last_updated=fixed_time,
        last_changed=fixed_time,
    )
    # When last_changed == last_updated only "lc" is emitted.
    expected = {
        "a": {"pig": "dog"},
        "c": state.context.id,
        "lc": fixed_time.timestamp(),
        "s": "on",
    }
    # We are not too concerned about these being ReadOnlyDict
    # since we don't expect them to be called by external callers.
    # Assert twice: the second pass exercises the cached value.
    for _ in range(2):
        assert state.as_compressed_state == expected
Test a State as compressed state where last_changed is not last_updated.
def test_state_as_compressed_state_unique_last_updated() -> None:
    """Test a State as compressed state where last_changed is not last_updated."""
    changed_at = datetime(1984, 12, 8, 11, 0, 0, tzinfo=dt_util.UTC)
    updated_at = datetime(1984, 12, 8, 12, 0, 0, tzinfo=dt_util.UTC)
    state = ha.State(
        "happy.happy",
        "on",
        {"pig": "dog"},
        last_updated=updated_at,
        last_changed=changed_at,
    )
    # Distinct timestamps: both "lc" and "lu" must be present.
    expected = {
        "a": {"pig": "dog"},
        "c": state.context.id,
        "lc": changed_at.timestamp(),
        "lu": updated_at.timestamp(),
        "s": "on",
    }
    # We are not too concerned about these being ReadOnlyDict
    # since we don't expect them to be called by external callers.
    # Assert twice: the second pass exercises the cached value.
    for _ in range(2):
        assert state.as_compressed_state == expected
Test a State as a JSON compressed state.
def test_state_as_compressed_state_json() -> None:
    """Test a State as a JSON compressed state."""
    fixed_time = datetime(1984, 12, 8, 12, 0, 0, tzinfo=dt_util.UTC)
    state = ha.State(
        "happy.happy",
        "on",
        {"pig": "dog"},
        last_updated=fixed_time,
        last_changed=fixed_time,
        context=ha.Context(id="01H0D6H5K3SZJ3XGDHED1TJ79N"),
    )
    expected = b'"happy.happy":{"s":"on","a":{"pig":"dog"},"c":"01H0D6H5K3SZJ3XGDHED1TJ79N","lc":471355200.0}'

    first = state.as_compressed_state_json
    assert first == expected
    # Second access must hit the cache and return the same bytes object.
    assert state.as_compressed_state_json == expected
    assert state.as_compressed_state_json is first
Test state.init.
def test_state_init() -> None:
    """Test state.init."""
    # Entity id without a domain/object separator is rejected.
    with pytest.raises(InvalidEntityFormatError):
        ha.State("invalid_entity_format", "test_state")

    # State values longer than 255 characters are rejected.
    with pytest.raises(InvalidStateError):
        ha.State("domain.long_state", "t" * 256)
Test domain.
def test_state_domain() -> None:
    """Test domain."""
    # The domain is the part of the entity id before the dot.
    assert ha.State("some_domain.hello", "world").domain == "some_domain"
Test object ID.
def test_state_object_id() -> None:
    """Test object ID."""
    # The object id is the part of the entity id after the dot.
    assert ha.State("domain.hello", "world").object_id == "hello"
Test if there is no friendly name.
def test_state_name_if_no_friendly_name_attr() -> None:
    """Test if there is no friendly name."""
    # Without a friendly_name attribute the object id is used, with
    # underscores replaced by spaces.
    unnamed_state = ha.State("domain.hello_world", "world")
    assert unnamed_state.name == "hello world"
Test if there is a friendly name.
def test_state_name_if_friendly_name_attr() -> None:
    """Test if there is a friendly name."""
    friendly = "Some Unique Name"
    named_state = ha.State(
        "domain.hello_world", "world", {ATTR_FRIENDLY_NAME: friendly}
    )
    # The friendly_name attribute wins over the derived object-id name.
    assert named_state.name == friendly
Test conversion of dict.
def test_state_dict_conversion() -> None:
    """Test conversion of dict."""
    original = ha.State("domain.hello", "world", {"some": "attr"})
    # Round trip: serializing, deserializing and serializing again must
    # reproduce the same dictionary.
    round_tripped = ha.State.from_dict(original.as_dict())
    assert round_tripped.as_dict() == original.as_dict()
Test conversion with wrong data.
def test_state_dict_conversion_with_wrong_data() -> None:
    """Test conversion with wrong data."""
    # Missing entity_id or state (or no dict at all) yields None.
    for bad_input in (None, {"state": "yes"}, {"entity_id": "yes"}):
        assert ha.State.from_dict(bad_input) is None

    # Unknown keys inside the context payload are ignored, not fatal.
    restored = ha.State.from_dict(
        {
            "entity_id": "light.kitchen",
            "state": "on",
            "context": {"id": "123", "non-existing": "crash"},
        }
    )
    assert restored is not None
    assert restored.context.id == "123"
Test state.repr.
def test_state_repr() -> None:
    """Test state.repr."""
    fixed_time = datetime(1984, 12, 8, 12, 0, 0)

    # Without attributes only state and timestamp are shown.
    bare_state = ha.State("happy.happy", "on", last_changed=fixed_time)
    assert str(bare_state) == "<state happy.happy=on @ 1984-12-08T12:00:00+00:00>"

    # Attributes are rendered as key=value pairs after the state.
    attr_state = ha.State(
        "happy.happy",
        "on",
        {"brightness": 144},
        last_changed=fixed_time,
    )
    assert (
        str(attr_state)
        == "<state happy.happy=on; brightness=144 @ 1984-12-08T12:00:00+00:00>"
    )
Test ServiceCall repr.
def test_service_call_repr() -> None:
    """Test ServiceCall repr."""
    # Without service data only domain.service and context id are shown.
    plain_call = ha.ServiceCall("homeassistant", "start")
    assert (
        str(plain_call)
        == f"<ServiceCall homeassistant.start (c:{plain_call.context.id})>"
    )

    # Service data is rendered as key=value pairs after the context.
    data_call = ha.ServiceCall("homeassistant", "start", {"fast": "yes"})
    assert (
        str(data_call)
        == f"<ServiceCall homeassistant.start (c:{data_call.context.id}): fast=yes>"
    )
Test context init.
def test_context() -> None:
    """Test context init."""
    # Default construction: no user or parent, but an id is generated.
    default_ctx = ha.Context()
    assert default_ctx.user_id is None
    assert default_ctx.parent_id is None
    assert default_ctx.id is not None

    # Positional args are user_id and parent_id; id is still generated.
    explicit_ctx = ha.Context(23, 100)
    assert explicit_ctx.user_id == 23
    assert explicit_ctx.parent_id == 100
    assert explicit_ctx.id is not None
Test context JSON fragments.
def test_context_json_fragment() -> None:
    """Test context JSON fragments.

    The order of property access matters here: context1 evaluates
    json_fragment BEFORE as_dict, context2 the other way around, to
    prove neither access order affects the cached results.
    """
    context1, context2 = (ha.Context(id="01H0D6K3RFJAYAV2093ZW30PCW") for _ in range(2))

    # We are testing that the JSON fragments are the same when as_dict is called
    # after json_fragment or before.
    json_fragment_1 = context1.json_fragment
    as_dict_1 = context1.as_dict()
    as_dict_2 = context2.as_dict()
    json_fragment_2 = context2.json_fragment

    assert json_dumps(json_fragment_1) == json_dumps(json_fragment_2)

    # We also test that the as_dict is the same
    assert as_dict_1 == as_dict_2

    # Finally we verify that the as_dict is a ReadOnlyDict
    # regardless of if the json fragment was called first or not
    assert isinstance(as_dict_1, ReadOnlyDict)
    assert isinstance(as_dict_2, ReadOnlyDict)
Test valid entity ID.
def test_valid_entity_id() -> None:
    """Test valid entity ID."""
    # Rejected: leading/trailing/double underscores, missing parts,
    # uppercase letters, or no dot at all.
    invalid_ids = (
        "_light.kitchen",
        ".kitchen",
        ".light.kitchen",
        "light_.kitchen",
        "light._kitchen",
        "light.",
        "light.kitchen__ceiling",
        "light.kitchen_yo_",
        "light.kitchen.",
        "Light.kitchen",
        "light.Kitchen",
        "lightkitchen",
    )
    # Accepted: lowercase alphanumerics with single inner underscores.
    valid_ids = (
        "1.a",
        "1light.kitchen",
        "a.1",
        "a.a",
        "input_boolean.hello_world_0123",
        "light.1kitchen",
        "light.kitchen",
        "light.something_yoo",
    )

    for entity_id in invalid_ids:
        assert not ha.valid_entity_id(entity_id), entity_id
    for entity_id in valid_ids:
        assert ha.valid_entity_id(entity_id), entity_id
Test valid domain.
def test_valid_domain() -> None:
    """Test valid domain."""
    # Rejected: leading underscores/dots, any dot, uppercase letters.
    invalid_domains = (
        "_light",
        ".kitchen",
        ".light.kitchen",
        "light_.kitchen",
        "._kitchen",
        "light.",
        "light.kitchen__ceiling",
        "light.kitchen_yo_",
        "light.kitchen.",
        "Light",
    )
    # Accepted: lowercase alphanumerics with single inner underscores.
    valid_domains = (
        "1",
        "1light",
        "a",
        "input_boolean",
        "light",
    )

    for domain in invalid_domains:
        assert not ha.valid_domain(domain), domain
    for domain in valid_domains:
        assert ha.valid_domain(domain), domain
Test timestamp functions for State.
def test_state_timestamps() -> None:
    """Test timestamp functions for State.

    Each timestamp property is asserted twice on purpose: the second
    access exercises the cached value.
    """
    now = dt_util.utcnow()
    state = ha.State(
        "light.bedroom",
        "on",
        {"brightness": 100},
        last_changed=now,
        last_reported=now,
        last_updated=now,
        context=ha.Context(id="1234"),
    )
    assert state.last_changed_timestamp == now.timestamp()
    assert state.last_changed_timestamp == now.timestamp()
    assert state.last_reported_timestamp == now.timestamp()
    assert state.last_reported_timestamp == now.timestamp()
    assert state.last_updated_timestamp == now.timestamp()
    assert state.last_updated_timestamp == now.timestamp()
Get the full name of an object in memory.
def _get_full_name(obj) -> str: """Get the full name of an object in memory.""" objtype = type(obj) name = objtype.__name__ if module := getattr(objtype, "__module__", None): return f"{module}.{name}" return name
Get all objects in memory with a specific type.
def _get_by_type(full_name: str) -> list[Any]:
    """Get all objects in memory with a specific type."""
    # Scan everything the garbage collector tracks and keep the objects
    # whose fully qualified type name matches.
    return list(
        filter(lambda obj: _get_full_name(obj) == full_name, gc.get_objects())
    )
Test is_callback_check_partial matches HassJob.
def test_is_callback_check_partial() -> None:
    """Test is_callback_check_partial matches HassJob."""

    @ha.callback
    def callback_func():
        pass

    def not_callback_func():
        pass

    # A plain callback is detected by both APIs.
    assert ha.is_callback(callback_func)
    assert HassJob(callback_func).job_type == ha.HassJobType.Callback

    # Partials (even nested ones) of a callback remain callbacks.
    assert ha.is_callback_check_partial(functools.partial(callback_func))
    assert HassJob(functools.partial(callback_func)).job_type == ha.HassJobType.Callback
    assert ha.is_callback_check_partial(
        functools.partial(functools.partial(callback_func))
    )
    assert HassJob(functools.partial(functools.partial(callback_func))).job_type == (
        ha.HassJobType.Callback
    )

    # Non-callbacks (and partials of them) run in the executor.
    assert not ha.is_callback_check_partial(not_callback_func)
    assert HassJob(not_callback_func).job_type == ha.HassJobType.Executor
    assert not ha.is_callback_check_partial(functools.partial(not_callback_func))
    assert HassJob(functools.partial(not_callback_func)).job_type == (
        ha.HassJobType.Executor
    )

    # We check the inner function, not the outer one: wrapping a partial
    # of a non-callback with @callback does not make it a callback.
    assert not ha.is_callback_check_partial(
        ha.callback(functools.partial(not_callback_func))
    )
    assert HassJob(ha.callback(functools.partial(not_callback_func))).job_type == (
        ha.HassJobType.Executor
    )