Provide IDs for pytest parametrize.
def idfn(val): """Provide IDs for pytest parametrize.""" if isinstance(val, datetime): return val.strftime("%Y%m%d")
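A minimal usage sketch (the datetime values and test name below are hypothetical, not from the suite): pytest feeds each parameter value through the ids callable, and a None return falls back to the default generated ID.

import pytest
from datetime import datetime

@pytest.mark.parametrize(
    "val",
    [datetime(2024, 1, 1), datetime(2024, 6, 15)],
    ids=idfn,  # cases show up as "20240101" / "20240615" instead of "val0" / "val1"
)
def test_example(val):
    assert isinstance(val, datetime)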
Return a list of mock select entities.
def mock_select_entities() -> list[MockSelectEntity]: """Return a list of mock select entities.""" return [ MockSelectEntity( name="select 1", unique_id="unique_select_1", options=["option 1", "option 2", "option 3"], current_option="option 1", ), MockSelectEntity( name="select 2", unique_id="unique_select_2", options=["option 1", "option 2", "option 3"], current_option=None, ), ]
Track calls to a mock service.
def calls(hass: HomeAssistant) -> list[ServiceCall]: """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation")
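For illustration, a hedged sketch of how such a fixture is typically consumed (the test body is hypothetical): async_mock_service registers a stub "test.automation" service, so every call made to it lands in the returned list.

async def test_automation_fires(hass: HomeAssistant, calls: list[ServiceCall]) -> None:
    # Anything that invokes the stubbed service appends a ServiceCall to `calls`.
    await hass.services.async_call("test", "automation", {"some": "data"}, blocking=True)
    assert len(calls) == 1
    assert calls[0].data["some"] == "data"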
Track calls to a mock service.
def calls(hass: HomeAssistant) -> list[ServiceCall]: """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation")
Mock Sense object for authentication.
def mock_sense(): """Mock Sense object for authentication.""" with patch( "homeassistant.components.sense.config_flow.ASyncSenseable" ) as mock_sense: mock_sense.return_value.authenticate = AsyncMock(return_value=True) mock_sense.return_value.validate_mfa = AsyncMock(return_value=True) mock_sense.return_value.sense_access_token = "ABC" mock_sense.return_value.sense_user_id = "123" mock_sense.return_value.sense_monitor_id = "456" mock_sense.return_value.device_id = "789" mock_sense.return_value.refresh_token = "XYZ" yield mock_sense
Load JSON data from fixture and return it.
def load_json_from_fixture(load_data: str) -> SensiboData: """Load JSON data from fixture and return it.""" json_data: dict[str, Any] = json.loads(load_data) return json_data
Load raw fixture data and return it.
def load_data_from_fixture() -> str: """Load raw fixture data and return it.""" return load_fixture("data.json", "sensibo")
Mock bluetooth for all tests in this module.
def mock_bluetooth(enable_bluetooth): """Mock bluetooth for all tests in this module."""
Get mock sensor entities.
def get_mock_sensor_entities() -> dict[str, MockSensor]: """Get mock sensor entities.""" return { device_class: MockSensor( name=f"{device_class} sensor", unique_id=f"unique_{device_class}", device_class=device_class, native_unit_of_measurement=UNITS_OF_MEASUREMENT.get(device_class), ) for device_class in SensorDeviceClass }
Stub copying the blueprints to the config folder.
def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder."""
Track calls to a mock service.
def calls(hass: HomeAssistant) -> list[ServiceCall]: """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation")
Ensure device class constants are declared in device_condition module.
def test_matches_device_classes(device_class: SensorDeviceClass) -> None: """Ensure device class constants are declared in device_condition module.""" # Ensure it has corresponding CONF_IS_*** constant constant_name = { SensorDeviceClass.BATTERY: "CONF_IS_BATTERY_LEVEL", SensorDeviceClass.CO: "CONF_IS_CO", SensorDeviceClass.CO2: "CONF_IS_CO2", SensorDeviceClass.ENERGY_STORAGE: "CONF_IS_ENERGY", SensorDeviceClass.VOLUME_STORAGE: "CONF_IS_VOLUME", }.get(device_class, f"CONF_IS_{device_class.value.upper()}") assert hasattr(device_condition, constant_name), f"Missing constant {constant_name}" # Ensure it has correct value constant_value = { SensorDeviceClass.BATTERY: "is_battery_level", SensorDeviceClass.ENERGY_STORAGE: "is_energy", SensorDeviceClass.VOLUME_STORAGE: "is_volume", }.get(device_class, f"is_{device_class.value}") assert getattr(device_condition, constant_name) == constant_value # Ensure it is present in ENTITY_CONDITIONS assert device_class in ENTITY_CONDITIONS # Ensure it is present in CONDITION_SCHEMA schema_types = ( device_condition.CONDITION_SCHEMA.validators[0].schema["type"].container ) assert constant_value in schema_types # Ensure it is present in strings.json strings = load_json("homeassistant/components/sensor/strings.json") assert constant_value in strings["device_automation"]["condition_type"]
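The device_class argument implies the test is parametrized over the full enum; a plausible decorator (an assumption, not shown in the excerpt) would be the following, which runs the checks above once per SensorDeviceClass member.

@pytest.mark.parametrize("device_class", list(SensorDeviceClass))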
Stub copying the blueprints to the config folder.
def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder."""
Track calls to a mock service.
def calls(hass: HomeAssistant) -> list[ServiceCall]: """Track calls to a mock service.""" return async_mock_service(hass, "test", "automation")
Ensure device class constants are declared in device_trigger module.
def test_matches_device_classes(device_class: SensorDeviceClass) -> None: """Ensure device class constants are declared in device_trigger module.""" # Ensure it has corresponding CONF_*** constant constant_name = { SensorDeviceClass.BATTERY: "CONF_BATTERY_LEVEL", SensorDeviceClass.CO: "CONF_CO", SensorDeviceClass.CO2: "CONF_CO2", SensorDeviceClass.ENERGY_STORAGE: "CONF_ENERGY", SensorDeviceClass.VOLUME_STORAGE: "CONF_VOLUME", }.get(device_class, f"CONF_{device_class.value.upper()}") assert hasattr(device_trigger, constant_name), f"Missing constant {constant_name}" # Ensure it has correct value constant_value = { SensorDeviceClass.BATTERY: "battery_level", SensorDeviceClass.ENERGY_STORAGE: "energy", SensorDeviceClass.VOLUME_STORAGE: "volume", }.get(device_class, device_class.value) assert getattr(device_trigger, constant_name) == constant_value # Ensure it is present in ENTITY_TRIGGERS assert device_class in ENTITY_TRIGGERS # Ensure it is present in TRIGGER_SCHEMA schema_types = device_trigger.TRIGGER_SCHEMA.validators[0].schema["type"].container assert constant_value in schema_types # Ensure it is present in strings.json strings = load_json("homeassistant/components/sensor/strings.json") assert constant_value in strings["device_automation"]["trigger_type"]
Test async_parse_date_datetime.
def test_async_parse_datetime(caplog: pytest.LogCaptureFixture) -> None: """Test async_parse_date_datetime.""" entity_id = "sensor.timestamp" device_class = SensorDeviceClass.TIMESTAMP assert ( async_parse_date_datetime( "2021-12-12 12:12Z", entity_id, device_class ).isoformat() == "2021-12-12T12:12:00+00:00" ) assert not caplog.text # No timezone assert ( async_parse_date_datetime("2021-12-12 12:12", entity_id, device_class) is None ) assert "sensor.timestamp rendered timestamp without timezone" in caplog.text # Invalid timestamp assert async_parse_date_datetime("12 past 12", entity_id, device_class) is None assert "sensor.timestamp rendered invalid timestamp: 12 past 12" in caplog.text device_class = SensorDeviceClass.DATE caplog.clear() assert ( async_parse_date_datetime("2021-12-12", entity_id, device_class).isoformat() == "2021-12-12" ) assert not caplog.text # Invalid date assert async_parse_date_datetime("December 12th", entity_id, device_class) is None assert "sensor.timestamp rendered invalid date December 12th" in caplog.text
Make sure all number device classes are also available in SensorDeviceClass.
def test_device_classes_aligned() -> None: """Make sure all number device classes are also available in SensorDeviceClass.""" for device_class in NumberDeviceClass: assert hasattr(SensorDeviceClass, device_class.name) assert getattr(SensorDeviceClass, device_class.name).value == device_class.value
Mock config flow.
def config_flow_fixture(hass: HomeAssistant) -> Generator[None, None, None]: """Mock config flow.""" mock_platform(hass, f"{TEST_DOMAIN}.config_flow") with mock_config_flow(TEST_DOMAIN, MockFlow): yield
Test async_rounded_state on unregistered entity is passthrough.
def test_async_rounded_state_unregistered_entity_is_passthrough( hass: HomeAssistant, ) -> None: """Test async_rounded_state on unregistered entity is passthrough.""" hass.states.async_set("sensor.test", "1.004") state = hass.states.get("sensor.test") assert async_rounded_state(hass, "sensor.test", state) == "1.004" hass.states.async_set("sensor.test", "-0.0") state = hass.states.get("sensor.test") assert async_rounded_state(hass, "sensor.test", state) == "-0.0"
Test async_rounded_state on a registered entity with display precision. The -0 should be dropped.
def test_async_rounded_state_registered_entity_with_display_precision( hass: HomeAssistant, ) -> None: """Test async_rounded_state on a registered entity with display precision. The -0 should be dropped. """ entity_registry = er.async_get(hass) entry = entity_registry.async_get_or_create("sensor", "test", "very_unique") entity_registry.async_update_entity_options( entry.entity_id, "sensor", {"suggested_display_precision": 2, "display_precision": 4}, ) entity_id = entry.entity_id hass.states.async_set(entity_id, "1.004") state = hass.states.get(entity_id) assert async_rounded_state(hass, entity_id, state) == "1.0040" hass.states.async_set(entity_id, "-0.0") state = hass.states.get(entity_id) assert async_rounded_state(hass, entity_id, state) == "0.0000"
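The last assertion relies on normalizing the sign of negative zero: plain float formatting preserves it, while adding positive zero drops it under IEEE 754 rules. A standalone sketch of the behavior the helper needs to produce (not its actual implementation):

value = float("-0.0")
print(f"{value:.4f}")      # "-0.0000": the sign survives ordinary formatting
print(f"{value + 0:.4f}")  # "0.0000": -0.0 + 0.0 is +0.0, so the sign is dropped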
Test all numeric device classes have unit and state class.
def test_device_class_units_state_classes(hass: HomeAssistant) -> None: """Test all numeric device classes have unit and state class.""" # DEVICE_CLASS_UNITS should include all device classes except: # - SensorDeviceClass.MONETARY # - Device classes enumerated in NON_NUMERIC_DEVICE_CLASSES assert set(DEVICE_CLASS_UNITS) == set( SensorDeviceClass ) - NON_NUMERIC_DEVICE_CLASSES - {SensorDeviceClass.MONETARY} # DEVICE_CLASS_STATE_CLASSES should include all device classes assert set(DEVICE_CLASS_STATE_CLASSES) == set(SensorDeviceClass)
Test module.__all__ is correctly set.
def test_all(module: ModuleType) -> None: """Test module.__all__ is correctly set.""" help_test_all(module)
Test deprecated constants.
def test_deprecated_constants( caplog: pytest.LogCaptureFixture, enum: sensor.SensorStateClass, module: ModuleType, ) -> None: """Test deprecated constants.""" import_and_test_deprecated_constant_enum( caplog, module, enum, "STATE_CLASS_", "2025.1" )
Test deprecated constants.
def test_deprecated_constants_sensor_device_class( caplog: pytest.LogCaptureFixture, enum: sensor.SensorDeviceClass, ) -> None: """Test deprecated constants.""" import_and_test_deprecated_constant_enum( caplog, sensor, enum, "DEVICE_CLASS_", "2025.1" )
Set the time zone for the tests.
def set_time_zone(): """Set the time zone for the tests.""" # Set our timezone to CST/Regina so we can check calculations # This keeps UTC-6 all year round dt_util.set_default_time_zone(dt_util.get_time_zone("America/Regina")) yield dt_util.set_default_time_zone(dt_util.get_time_zone("UTC"))
Test compiling hourly statistics.
def test_compile_hourly_statistics( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, device_class, state_unit, display_unit, statistics_unit, unit_class, mean, min, max, ) -> None: """Test compiling hourly statistics.""" zero = dt_util.utcnow() hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added attributes = { "device_class": device_class, "state_class": "measurement", "unit_of_measurement": state_unit, } with freeze_time(zero) as freezer: four, states = record_states(hass, freezer, zero, "sensor.test1", attributes) hist = history.get_significant_states( hass, zero, four, hass.states.async_entity_ids() ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) do_adhoc_statistics(hass, start=zero) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": statistics_unit, "unit_class": unit_class, } ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(mean), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, } ] } assert "Error while processing event StatisticsTask" not in caplog.text
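This test is driven by a parameter matrix covering device class, units, and expected statistics; a hedged sketch of one plausible row, reusing the temperature figures that appear later in this file:

@pytest.mark.parametrize(
    (
        "device_class", "state_unit", "display_unit",
        "statistics_unit", "unit_class", "mean", "min", "max",
    ),
    [
        ("temperature", "°C", "°C", "°C", "temperature", 13.050847, -10, 30),
    ],
)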
Test compiling hourly statistics where some states share the same last_updated value. If the last_updated value is the same we will have a zero duration.
def test_compile_hourly_statistics_with_some_same_last_updated( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, device_class, state_unit, display_unit, statistics_unit, unit_class, mean, min, max, ) -> None: """Test compiling hourly statistics with the some of the same last updated value. If the last updated value is the same we will have a zero duration. """ zero = dt_util.utcnow() hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added entity_id = "sensor.test1" attributes = { "device_class": device_class, "state_class": "measurement", "unit_of_measurement": state_unit, } attributes = dict(attributes) seq = [-10, 15, 30, 60] def set_state(entity_id, state, **kwargs): """Set the state.""" hass.states.set(entity_id, state, **kwargs) wait_recording_done(hass) return hass.states.get(entity_id) one = zero + timedelta(seconds=1 * 5) two = one + timedelta(seconds=10 * 5) three = two + timedelta(seconds=40 * 5) four = three + timedelta(seconds=10 * 5) states = {entity_id: []} with freeze_time(one) as freezer: states[entity_id].append( set_state(entity_id, str(seq[0]), attributes=attributes) ) # Record two states at the exact same time freezer.move_to(two) states[entity_id].append( set_state(entity_id, str(seq[1]), attributes=attributes) ) states[entity_id].append( set_state(entity_id, str(seq[2]), attributes=attributes) ) freezer.move_to(three) states[entity_id].append( set_state(entity_id, str(seq[3]), attributes=attributes) ) hist = history.get_significant_states( hass, zero, four, hass.states.async_entity_ids() ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) do_adhoc_statistics(hass, start=zero) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": statistics_unit, "unit_class": unit_class, } ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(mean), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, } ] } assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics where all states share the same last_updated value. If the last_updated value is the same we will have a zero duration.
def test_compile_hourly_statistics_with_all_same_last_updated( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, device_class, state_unit, display_unit, statistics_unit, unit_class, mean, min, max, ) -> None: """Test compiling hourly statistics with the all of the same last updated value. If the last updated value is the same we will have a zero duration. """ zero = dt_util.utcnow() hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added entity_id = "sensor.test1" attributes = { "device_class": device_class, "state_class": "measurement", "unit_of_measurement": state_unit, } attributes = dict(attributes) seq = [-10, 15, 30, 60] def set_state(entity_id, state, **kwargs): """Set the state.""" hass.states.set(entity_id, state, **kwargs) wait_recording_done(hass) return hass.states.get(entity_id) one = zero + timedelta(seconds=1 * 5) two = one + timedelta(seconds=10 * 5) three = two + timedelta(seconds=40 * 5) four = three + timedelta(seconds=10 * 5) states = {entity_id: []} with freeze_time(two): states[entity_id].append( set_state(entity_id, str(seq[0]), attributes=attributes) ) states[entity_id].append( set_state(entity_id, str(seq[1]), attributes=attributes) ) states[entity_id].append( set_state(entity_id, str(seq[2]), attributes=attributes) ) states[entity_id].append( set_state(entity_id, str(seq[3]), attributes=attributes) ) hist = history.get_significant_states( hass, zero, four, hass.states.async_entity_ids() ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) do_adhoc_statistics(hass, start=zero) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": statistics_unit, "unit_class": unit_class, } ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(mean), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, } ] } assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics when the only state is at the end of the period.
def test_compile_hourly_statistics_only_state_is_and_end_of_period( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, device_class, state_unit, display_unit, statistics_unit, unit_class, mean, min, max, ) -> None: """Test compiling hourly statistics when the only state at end of period.""" zero = dt_util.utcnow() hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added entity_id = "sensor.test1" attributes = { "device_class": device_class, "state_class": "measurement", "unit_of_measurement": state_unit, } attributes = dict(attributes) seq = [-10, 15, 30, 60] def set_state(entity_id, state, **kwargs): """Set the state.""" hass.states.set(entity_id, state, **kwargs) wait_recording_done(hass) return hass.states.get(entity_id) one = zero + timedelta(seconds=1 * 5) two = one + timedelta(seconds=10 * 5) three = two + timedelta(seconds=40 * 5) four = three + timedelta(seconds=10 * 5) end = zero + timedelta(minutes=5) states = {entity_id: []} with freeze_time(end): states[entity_id].append( set_state(entity_id, str(seq[0]), attributes=attributes) ) states[entity_id].append( set_state(entity_id, str(seq[1]), attributes=attributes) ) states[entity_id].append( set_state(entity_id, str(seq[2]), attributes=attributes) ) states[entity_id].append( set_state(entity_id, str(seq[3]), attributes=attributes) ) hist = history.get_significant_states( hass, zero, four, hass.states.async_entity_ids() ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) do_adhoc_statistics(hass, start=zero) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": statistics_unit, "unit_class": unit_class, } ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(mean), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, } ] } assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics.
def test_compile_hourly_statistics_purged_state_changes( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, device_class, state_unit, display_unit, statistics_unit, unit_class, ) -> None: """Test compiling hourly statistics.""" zero = dt_util.utcnow() hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added attributes = { "device_class": device_class, "state_class": "measurement", "unit_of_measurement": state_unit, } with freeze_time(zero) as freezer: four, states = record_states(hass, freezer, zero, "sensor.test1", attributes) hist = history.get_significant_states( hass, zero, four, hass.states.async_entity_ids() ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) mean = min_value = max_value = float(hist["sensor.test1"][-1].state) # Purge all states from the database with freeze_time(four): hass.services.call("recorder", "purge", {"keep_days": 0}) hass.block_till_done() wait_recording_done(hass) hist = history.get_significant_states( hass, zero, four, hass.states.async_entity_ids() ) assert not hist do_adhoc_statistics(hass, start=zero) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": statistics_unit, "unit_class": unit_class, } ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(mean), "min": pytest.approx(min_value), "max": pytest.approx(max_value), "last_reset": None, "state": None, "sum": None, } ] } assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics for sensor with unit not matching device class.
def test_compile_hourly_statistics_wrong_unit( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, attributes, ) -> None: """Test compiling hourly statistics for sensor with unit not matching device class.""" zero = dt_util.utcnow() hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added with freeze_time(zero) as freezer: four, states = record_states(hass, freezer, zero, "sensor.test1", attributes) attributes_tmp = dict(attributes) attributes_tmp["unit_of_measurement"] = "invalid" _, _states = record_states(hass, freezer, zero, "sensor.test2", attributes_tmp) states = {**states, **_states} attributes_tmp.pop("unit_of_measurement") _, _states = record_states(hass, freezer, zero, "sensor.test3", attributes_tmp) states = {**states, **_states} attributes_tmp = dict(attributes) attributes_tmp["state_class"] = "invalid" _, _states = record_states(hass, freezer, zero, "sensor.test4", attributes_tmp) states = {**states, **_states} attributes_tmp.pop("state_class") _, _states = record_states(hass, freezer, zero, "sensor.test5", attributes_tmp) states = {**states, **_states} attributes_tmp = dict(attributes) attributes_tmp["device_class"] = "invalid" _, _states = record_states(hass, freezer, zero, "sensor.test6", attributes_tmp) states = {**states, **_states} attributes_tmp.pop("device_class") _, _states = record_states(hass, freezer, zero, "sensor.test7", attributes_tmp) states = {**states, **_states} hist = history.get_significant_states( hass, zero, four, hass.states.async_entity_ids() ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) do_adhoc_statistics(hass, start=zero) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": "°C", "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": "°C", "unit_class": "temperature", }, { "display_unit_of_measurement": "invalid", "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistic_id": "sensor.test2", "statistics_unit_of_measurement": "invalid", "unit_class": None, }, { "display_unit_of_measurement": None, "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistic_id": "sensor.test3", "statistics_unit_of_measurement": None, "unit_class": "unitless", }, { "statistic_id": "sensor.test6", "display_unit_of_measurement": "°C", "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": "°C", "unit_class": "temperature", }, { "statistic_id": "sensor.test7", "display_unit_of_measurement": "°C", "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": "°C", "unit_class": "temperature", }, ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(13.050847), "min": pytest.approx(-10.0), "max": pytest.approx(30.0), "last_reset": None, "state": None, "sum": None, } ], "sensor.test2": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": 13.05084745762712, "min": -10.0, "max": 30.0, "last_reset": None, "state": None, "sum": None, } ], "sensor.test3": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": 13.05084745762712, "min": -10.0, "max": 30.0, "last_reset": None, "state": None, "sum": None, } ], "sensor.test6": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(13.050847), "min": pytest.approx(-10.0), "max": pytest.approx(30.0), "last_reset": None, "state": None, "sum": None, } ], "sensor.test7": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(13.050847), "min": pytest.approx(-10.0), "max": pytest.approx(30.0), "last_reset": None, "state": None, "sum": None, } ], } assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics.
def test_compile_hourly_sum_statistics_amount_reset_every_state_change( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, state_class, device_class, state_unit, display_unit, statistics_unit, unit_class, factor, ) -> None: """Test compiling hourly statistics.""" zero = dt_util.utcnow() hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added attributes = { "device_class": device_class, "state_class": state_class, "unit_of_measurement": state_unit, "last_reset": None, } seq = [10, 15, 15, 15, 20, 20, 20, 25] # Make sure the sequence has consecutive equal states assert seq[1] == seq[2] == seq[3] # Make sure the first and last state differ assert seq[0] != seq[-1] states = {"sensor.test1": []} with freeze_time(zero) as freezer: # Insert states for a 1st statistics period one = zero for i in range(len(seq)): one = one + timedelta(seconds=5) attributes = dict(attributes) attributes["last_reset"] = dt_util.as_local(one).isoformat() _states = record_meter_state( hass, freezer, one, "sensor.test1", attributes, seq[i : i + 1] ) states["sensor.test1"].extend(_states["sensor.test1"]) # Insert states for a 2nd statistics period two = zero + timedelta(minutes=5) for i in range(len(seq)): two = two + timedelta(seconds=5) attributes = dict(attributes) attributes["last_reset"] = dt_util.as_local(two).isoformat() _states = record_meter_state( hass, freezer, two, "sensor.test1", attributes, seq[i : i + 1] ) states["sensor.test1"].extend(_states["sensor.test1"]) hist = history.get_significant_states( hass, zero - timedelta.resolution, two + timedelta.resolution, hass.states.async_entity_ids(), significant_changes_only=False, ) assert_multiple_states_equal_without_context_and_last_changed( dict(states)["sensor.test1"], dict(hist)["sensor.test1"] ) do_adhoc_statistics(hass, start=zero) do_adhoc_statistics(hass, start=zero + timedelta(minutes=5)) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, "has_sum": True, "name": None, "source": "recorder", "statistics_unit_of_measurement": statistics_unit, "unit_class": unit_class, } ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "max": None, "mean": None, "min": None, "last_reset": process_timestamp(dt_util.as_local(one)).timestamp(), "state": pytest.approx(factor * seq[7]), "sum": pytest.approx(factor * (sum(seq) - seq[0])), }, { "start": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "end": process_timestamp(zero + timedelta(minutes=10)).timestamp(), "max": None, "mean": None, "min": None, "last_reset": process_timestamp(dt_util.as_local(two)).timestamp(), "state": pytest.approx(factor * seq[7]), "sum": pytest.approx(factor * (2 * sum(seq) - seq[0])), }, ] } assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics.
def test_compile_hourly_sum_statistics_amount_invalid_last_reset( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, state_class, device_class, state_unit, display_unit, statistics_unit, unit_class, factor, ) -> None: """Test compiling hourly statistics.""" zero = dt_util.utcnow() hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added attributes = { "device_class": device_class, "state_class": state_class, "unit_of_measurement": state_unit, "last_reset": None, } seq = [10, 15, 15, 15, 20, 20, 20, 25] states = {"sensor.test1": []} # Insert states with freeze_time(zero) as freezer: one = zero for i in range(len(seq)): one = one + timedelta(seconds=5) attributes = dict(attributes) attributes["last_reset"] = dt_util.as_local(one).isoformat() if i == 3: attributes["last_reset"] = "festivus" # not a valid time _states = record_meter_state( hass, freezer, one, "sensor.test1", attributes, seq[i : i + 1] ) states["sensor.test1"].extend(_states["sensor.test1"]) hist = history.get_significant_states( hass, zero - timedelta.resolution, one + timedelta.resolution, hass.states.async_entity_ids(), significant_changes_only=False, ) assert_multiple_states_equal_without_context_and_last_changed( dict(states)["sensor.test1"], dict(hist)["sensor.test1"] ) do_adhoc_statistics(hass, start=zero) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, "has_sum": True, "name": None, "source": "recorder", "statistics_unit_of_measurement": statistics_unit, "unit_class": unit_class, } ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "max": None, "mean": None, "min": None, "last_reset": process_timestamp(dt_util.as_local(one)).timestamp(), "state": pytest.approx(factor * seq[7]), "sum": pytest.approx(factor * (sum(seq) - seq[0] - seq[3])), }, ] } assert "Error while processing event StatisticsTask" not in caplog.text assert "Ignoring invalid last reset 'festivus' for sensor.test1" in caplog.text
Test compiling hourly statistics with nan and inf states.
def test_compile_hourly_sum_statistics_nan_inf_state( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, state_class, device_class, state_unit, display_unit, statistics_unit, unit_class, factor, ) -> None: """Test compiling hourly statistics with nan and inf states.""" zero = dt_util.utcnow() hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added attributes = { "device_class": device_class, "state_class": state_class, "unit_of_measurement": state_unit, "last_reset": None, } seq = [10, math.nan, 15, 15, 20, math.inf, 20, 10] states = {"sensor.test1": []} with freeze_time(zero) as freezer: one = zero for i in range(len(seq)): one = one + timedelta(seconds=5) attributes = dict(attributes) attributes["last_reset"] = dt_util.as_local(one).isoformat() _states = record_meter_state( hass, freezer, one, "sensor.test1", attributes, seq[i : i + 1] ) states["sensor.test1"].extend(_states["sensor.test1"]) hist = history.get_significant_states( hass, zero - timedelta.resolution, one + timedelta.resolution, hass.states.async_entity_ids(), significant_changes_only=False, ) assert_multiple_states_equal_without_context_and_last_changed( dict(states)["sensor.test1"], dict(hist)["sensor.test1"] ) do_adhoc_statistics(hass, start=zero) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, "has_sum": True, "name": None, "source": "recorder", "statistics_unit_of_measurement": statistics_unit, "unit_class": unit_class, } ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "max": None, "mean": None, "min": None, "last_reset": process_timestamp(one).timestamp(), "state": pytest.approx(factor * seq[7]), "sum": pytest.approx( factor * (seq[2] + seq[3] + seq[4] + seq[6] + seq[7]) ), }, ] } assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics with negative states.
def test_compile_hourly_sum_statistics_negative_state( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, entity_id, warning_1, warning_2, state_class, device_class, state_unit, display_unit, statistics_unit, unit_class, offset, ) -> None: """Test compiling hourly statistics with negative states.""" zero = dt_util.utcnow() hass = hass_recorder() hass.data.pop(loader.DATA_CUSTOM_COMPONENTS) mocksensor = MockSensor(name="custom_sensor") mocksensor._attr_should_poll = False setup_test_component_platform(hass, DOMAIN, [mocksensor], built_in=False) setup_component(hass, "homeassistant", {}) setup_component( hass, "sensor", {"sensor": [{"platform": "demo"}, {"platform": "test"}]} ) hass.block_till_done() attributes = { "device_class": device_class, "state_class": state_class, "unit_of_measurement": state_unit, } seq = [15, 16, 15, 16, 20, -20, 20, 10] states = {entity_id: []} offending_state = 5 if state := hass.states.get(entity_id): states[entity_id].append(state) offending_state = 6 one = zero with freeze_time(zero) as freezer: for i in range(len(seq)): one = one + timedelta(seconds=5) _states = record_meter_state( hass, freezer, one, entity_id, attributes, seq[i : i + 1] ) states[entity_id].extend(_states[entity_id]) hist = history.get_significant_states( hass, zero - timedelta.resolution, one + timedelta.resolution, hass.states.async_entity_ids(), significant_changes_only=False, ) assert_multiple_states_equal_without_context_and_last_changed( dict(states)[entity_id], dict(hist)[entity_id] ) do_adhoc_statistics(hass, start=zero) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert { "display_unit_of_measurement": display_unit, "has_mean": False, "has_sum": True, "name": None, "source": "recorder", "statistic_id": entity_id, "statistics_unit_of_measurement": statistics_unit, "unit_class": unit_class, } in statistic_ids stats = statistics_during_period(hass, zero, period="5minute") assert stats[entity_id] == [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "max": None, "mean": None, "min": None, "last_reset": None, "state": pytest.approx(seq[7]), "sum": pytest.approx(offset + 15), # (20 - 15) + (10 - 0) }, ] assert "Error while processing event StatisticsTask" not in caplog.text state = states[entity_id][offending_state].state last_updated = states[entity_id][offending_state].last_updated.isoformat() assert ( f"Entity {entity_id} {warning_1}has state class total_increasing, but its state " f"is negative. Triggered by state {state} with last_updated set to {last_updated}." in caplog.text ) assert warning_2 in caplog.text
Test compiling hourly statistics.
def test_compile_hourly_sum_statistics_total_no_reset( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, device_class, state_unit, display_unit, statistics_unit, unit_class, factor, ) -> None: """Test compiling hourly statistics.""" period0 = dt_util.utcnow() period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added attributes = { "device_class": device_class, "state_class": "total", "unit_of_measurement": state_unit, } seq = [10, 15, 20, 10, 30, 40, 50, 60, 70] with freeze_time(period0) as freezer: four, eight, states = record_meter_states( hass, freezer, period0, "sensor.test1", attributes, seq ) wait_recording_done(hass) hist = history.get_significant_states( hass, period0 - timedelta.resolution, eight + timedelta.resolution, hass.states.async_entity_ids(), ) assert_multiple_states_equal_without_context_and_last_changed( dict(states)["sensor.test1"], dict(hist)["sensor.test1"] ) do_adhoc_statistics(hass, start=period0) wait_recording_done(hass) do_adhoc_statistics(hass, start=period1) wait_recording_done(hass) do_adhoc_statistics(hass, start=period2) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, "has_sum": True, "name": None, "source": "recorder", "statistics_unit_of_measurement": statistics_unit, "unit_class": unit_class, } ] stats = statistics_during_period(hass, period0, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(period0).timestamp(), "end": process_timestamp(period0_end).timestamp(), "max": None, "mean": None, "min": None, "last_reset": None, "state": pytest.approx(factor * seq[2]), "sum": pytest.approx(factor * 10.0), }, { "start": process_timestamp(period1).timestamp(), "end": process_timestamp(period1_end).timestamp(), "max": None, "mean": None, "min": None, "last_reset": None, "state": pytest.approx(factor * seq[5]), "sum": pytest.approx(factor * 30.0), }, { "start": process_timestamp(period2).timestamp(), "end": process_timestamp(period2_end).timestamp(), "max": None, "mean": None, "min": None, "last_reset": None, "state": pytest.approx(factor * seq[8]), "sum": pytest.approx(factor * 60.0), }, ] } assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics.
def test_compile_hourly_sum_statistics_total_increasing( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, device_class, state_unit, display_unit, statistics_unit, unit_class, factor, ) -> None: """Test compiling hourly statistics.""" period0 = dt_util.utcnow() period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added attributes = { "device_class": device_class, "state_class": "total_increasing", "unit_of_measurement": state_unit, } seq = [10, 15, 20, 10, 30, 40, 50, 60, 70] with freeze_time(period0) as freezer: four, eight, states = record_meter_states( hass, freezer, period0, "sensor.test1", attributes, seq ) wait_recording_done(hass) hist = history.get_significant_states( hass, period0 - timedelta.resolution, eight + timedelta.resolution, hass.states.async_entity_ids(), ) assert_multiple_states_equal_without_context_and_last_changed( dict(states)["sensor.test1"], dict(hist)["sensor.test1"] ) do_adhoc_statistics(hass, start=period0) wait_recording_done(hass) do_adhoc_statistics(hass, start=period1) wait_recording_done(hass) do_adhoc_statistics(hass, start=period2) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, "has_sum": True, "name": None, "source": "recorder", "statistics_unit_of_measurement": statistics_unit, "unit_class": unit_class, } ] stats = statistics_during_period(hass, period0, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(period0).timestamp(), "end": process_timestamp(period0_end).timestamp(), "max": None, "mean": None, "min": None, "last_reset": None, "state": pytest.approx(factor * seq[2]), "sum": pytest.approx(factor * 10.0), }, { "start": process_timestamp(period1).timestamp(), "end": process_timestamp(period1_end).timestamp(), "max": None, "mean": None, "min": None, "last_reset": None, "state": pytest.approx(factor * seq[5]), "sum": pytest.approx(factor * 50.0), }, { "start": process_timestamp(period2).timestamp(), "end": process_timestamp(period2_end).timestamp(), "max": None, "mean": None, "min": None, "last_reset": None, "state": pytest.approx(factor * seq[8]), "sum": pytest.approx(factor * 80.0), }, ] } assert "Error while processing event StatisticsTask" not in caplog.text assert "Detected new cycle for sensor.test1, last_reset set to" not in caplog.text assert "Compiling initial sum statistics for sensor.test1" in caplog.text assert "Detected new cycle for sensor.test1, value dropped" in caplog.text
Test small dips in sensor readings do not trigger a reset.
def test_compile_hourly_sum_statistics_total_increasing_small_dip( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, device_class, state_unit, display_unit, statistics_unit, unit_class, factor, ) -> None: """Test small dips in sensor readings do not trigger a reset.""" period0 = dt_util.utcnow() period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added attributes = { "device_class": device_class, "state_class": "total_increasing", "unit_of_measurement": state_unit, } seq = [10, 15, 20, 19, 30, 40, 39, 60, 70] with freeze_time(period0) as freezer: four, eight, states = record_meter_states( hass, freezer, period0, "sensor.test1", attributes, seq ) wait_recording_done(hass) hist = history.get_significant_states( hass, period0 - timedelta.resolution, eight + timedelta.resolution, hass.states.async_entity_ids(), ) assert_multiple_states_equal_without_context_and_last_changed( dict(states)["sensor.test1"], dict(hist)["sensor.test1"] ) do_adhoc_statistics(hass, start=period0) wait_recording_done(hass) do_adhoc_statistics(hass, start=period1) wait_recording_done(hass) assert ( "Entity sensor.test1 has state class total_increasing, but its state is not " "strictly increasing." ) not in caplog.text do_adhoc_statistics(hass, start=period2) wait_recording_done(hass) state = states["sensor.test1"][6].state previous_state = float(states["sensor.test1"][5].state) last_updated = states["sensor.test1"][6].last_updated.isoformat() assert ( "Entity sensor.test1 has state class total_increasing, but its state is not " f"strictly increasing. Triggered by state {state} ({previous_state}) with " f"last_updated set to {last_updated}. Please create a bug report at " "https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue" ) in caplog.text statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, "has_sum": True, "name": None, "source": "recorder", "statistics_unit_of_measurement": statistics_unit, "unit_class": unit_class, } ] stats = statistics_during_period(hass, period0, period="5minute") assert stats == { "sensor.test1": [ { "last_reset": None, "start": process_timestamp(period0).timestamp(), "end": process_timestamp(period0_end).timestamp(), "max": None, "mean": None, "min": None, "state": pytest.approx(factor * seq[2]), "sum": pytest.approx(factor * 10.0), }, { "last_reset": None, "start": process_timestamp(period1).timestamp(), "end": process_timestamp(period1_end).timestamp(), "max": None, "mean": None, "min": None, "state": pytest.approx(factor * seq[5]), "sum": pytest.approx(factor * 30.0), }, { "last_reset": None, "start": process_timestamp(period2).timestamp(), "end": process_timestamp(period2_end).timestamp(), "max": None, "mean": None, "min": None, "state": pytest.approx(factor * seq[8]), "sum": pytest.approx(factor * 60.0), }, ] } assert "Error while processing event StatisticsTask" not in caplog.text
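The sequence used here (20 → 19, 40 → 39) implies the reset detector tolerates small dips, apparently on the order of 10%; a hypothetical version of such a heuristic (an assumption, not the recorder's actual code):

def reset_detected(new_state: float, previous_state: float | None) -> bool:
    # Hypothetical 10% rule: a small dip is treated as sensor jitter and
    # only logged, while a larger drop starts a new metering cycle.
    if previous_state is None:
        return False
    return new_state < 0.9 * previous_state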
Test compiling hourly statistics.
def test_compile_hourly_energy_statistics_unsupported( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture ) -> None: """Test compiling hourly statistics.""" period0 = dt_util.utcnow() period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added sns1_attr = { "device_class": "energy", "state_class": "total", "unit_of_measurement": "kWh", "last_reset": None, } sns2_attr = {"device_class": "energy"} sns3_attr = {} seq1 = [10, 15, 20, 10, 30, 40, 50, 60, 70] seq2 = [110, 120, 130, 0, 30, 45, 55, 65, 75] seq3 = [0, 0, 5, 10, 30, 50, 60, 80, 90] with freeze_time(period0) as freezer: four, eight, states = record_meter_states( hass, freezer, period0, "sensor.test1", sns1_attr, seq1 ) _, _, _states = record_meter_states( hass, freezer, period0, "sensor.test2", sns2_attr, seq2 ) states = {**states, **_states} _, _, _states = record_meter_states( hass, freezer, period0, "sensor.test3", sns3_attr, seq3 ) states = {**states, **_states} wait_recording_done(hass) hist = history.get_significant_states( hass, period0 - timedelta.resolution, eight + timedelta.resolution, hass.states.async_entity_ids(), ) assert_multiple_states_equal_without_context_and_last_changed( dict(states)["sensor.test1"], dict(hist)["sensor.test1"] ) do_adhoc_statistics(hass, start=period0) wait_recording_done(hass) do_adhoc_statistics(hass, start=period1) wait_recording_done(hass) do_adhoc_statistics(hass, start=period2) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": "kWh", "has_mean": False, "has_sum": True, "name": None, "source": "recorder", "statistics_unit_of_measurement": "kWh", "unit_class": "energy", } ] stats = statistics_during_period(hass, period0, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(period0).timestamp(), "end": process_timestamp(period0_end).timestamp(), "max": None, "mean": None, "min": None, "last_reset": process_timestamp(period0).timestamp(), "state": pytest.approx(20.0), "sum": pytest.approx(10.0), }, { "start": process_timestamp(period1).timestamp(), "end": process_timestamp(period1_end).timestamp(), "max": None, "mean": None, "min": None, "last_reset": process_timestamp(four).timestamp(), "state": pytest.approx(40.0), "sum": pytest.approx(40.0), }, { "start": process_timestamp(period2).timestamp(), "end": process_timestamp(period2_end).timestamp(), "max": None, "mean": None, "min": None, "last_reset": process_timestamp(four).timestamp(), "state": pytest.approx(70.0), "sum": pytest.approx(70.0), }, ] } assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling multiple hourly statistics.
def test_compile_hourly_energy_statistics_multiple( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture ) -> None: """Test compiling multiple hourly statistics.""" period0 = dt_util.utcnow() period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added sns1_attr = {**ENERGY_SENSOR_ATTRIBUTES, "last_reset": None} sns2_attr = {**ENERGY_SENSOR_ATTRIBUTES, "last_reset": None} sns3_attr = { **ENERGY_SENSOR_ATTRIBUTES, "unit_of_measurement": "Wh", "last_reset": None, } seq1 = [10, 15, 20, 10, 30, 40, 50, 60, 70] seq2 = [110, 120, 130, 0, 30, 45, 55, 65, 75] seq3 = [0, 0, 5, 10, 30, 50, 60, 80, 90] with freeze_time(period0) as freezer: four, eight, states = record_meter_states( hass, freezer, period0, "sensor.test1", sns1_attr, seq1 ) _, _, _states = record_meter_states( hass, freezer, period0, "sensor.test2", sns2_attr, seq2 ) states = {**states, **_states} _, _, _states = record_meter_states( hass, freezer, period0, "sensor.test3", sns3_attr, seq3 ) states = {**states, **_states} wait_recording_done(hass) hist = history.get_significant_states( hass, period0 - timedelta.resolution, eight + timedelta.resolution, hass.states.async_entity_ids(), ) assert_multiple_states_equal_without_context_and_last_changed( dict(states)["sensor.test1"], dict(hist)["sensor.test1"] ) do_adhoc_statistics(hass, start=period0) wait_recording_done(hass) do_adhoc_statistics(hass, start=period1) wait_recording_done(hass) do_adhoc_statistics(hass, start=period2) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": "kWh", "has_mean": False, "has_sum": True, "name": None, "source": "recorder", "statistics_unit_of_measurement": "kWh", "unit_class": "energy", }, { "statistic_id": "sensor.test2", "display_unit_of_measurement": "kWh", "has_mean": False, "has_sum": True, "name": None, "source": "recorder", "statistics_unit_of_measurement": "kWh", "unit_class": "energy", }, { "statistic_id": "sensor.test3", "display_unit_of_measurement": "Wh", "has_mean": False, "has_sum": True, "name": None, "source": "recorder", "statistics_unit_of_measurement": "Wh", "unit_class": "energy", }, ] stats = statistics_during_period(hass, period0, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(period0).timestamp(), "end": process_timestamp(period0_end).timestamp(), "max": None, "mean": None, "min": None, "last_reset": process_timestamp(period0).timestamp(), "state": pytest.approx(20.0), "sum": pytest.approx(10.0), }, { "start": process_timestamp(period1).timestamp(), "end": process_timestamp(period1_end).timestamp(), "max": None, "mean": None, "min": None, "last_reset": process_timestamp(four).timestamp(), "state": pytest.approx(40.0), "sum": pytest.approx(40.0), }, { "start": process_timestamp(period2).timestamp(), "end": process_timestamp(period2_end).timestamp(), "max": None, "mean": None, "min": None, "last_reset": process_timestamp(four).timestamp(), "state": pytest.approx(70.0), "sum": pytest.approx(70.0), }, ], "sensor.test2": [ { "start": process_timestamp(period0).timestamp(), "end": process_timestamp(period0_end).timestamp(), "max": None, "mean": None, "min": None, "last_reset": process_timestamp(period0).timestamp(), "state": pytest.approx(130.0), "sum": pytest.approx(20.0), }, { "start": process_timestamp(period1).timestamp(), "end": process_timestamp(period1_end).timestamp(), "max": None, "mean": None, "min": None, "last_reset": process_timestamp(four).timestamp(), "state": pytest.approx(45.0), "sum": pytest.approx(-65.0), }, { "start": process_timestamp(period2).timestamp(), "end": process_timestamp(period2_end).timestamp(), "max": None, "mean": None, "min": None, "last_reset": process_timestamp(four).timestamp(), "state": pytest.approx(75.0), "sum": pytest.approx(-35.0), }, ], "sensor.test3": [ { "start": process_timestamp(period0).timestamp(), "end": process_timestamp(period0_end).timestamp(), "max": None, "mean": None, "min": None, "last_reset": process_timestamp(period0).timestamp(), "state": pytest.approx(5.0), "sum": pytest.approx(5.0), }, { "start": process_timestamp(period1).timestamp(), "end": process_timestamp(period1_end).timestamp(), "max": None, "mean": None, "min": None, "last_reset": process_timestamp(four).timestamp(), "state": pytest.approx(50.0), "sum": pytest.approx(60.0), }, { "start": process_timestamp(period2).timestamp(), "end": process_timestamp(period2_end).timestamp(), "max": None, "mean": None, "min": None, "last_reset": process_timestamp(four).timestamp(), "state": pytest.approx(90.0), "sum": pytest.approx(100.0), }, ], } assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics, with no changes during the hour.
def test_compile_hourly_statistics_unchanged( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, device_class, state_unit, value, ) -> None: """Test compiling hourly statistics, with no changes during the hour.""" zero = dt_util.utcnow() hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added attributes = { "device_class": device_class, "state_class": "measurement", "unit_of_measurement": state_unit, } with freeze_time(zero) as freezer: four, states = record_states(hass, freezer, zero, "sensor.test1", attributes) hist = history.get_significant_states( hass, zero, four, hass.states.async_entity_ids() ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) do_adhoc_statistics(hass, start=four) wait_recording_done(hass) stats = statistics_during_period(hass, four, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(four).timestamp(), "end": process_timestamp(four + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(value), "min": pytest.approx(value), "max": pytest.approx(value), "last_reset": None, "state": None, "sum": None, } ] } assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics, with the sensor being partially unavailable.
def test_compile_hourly_statistics_partially_unavailable( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture ) -> None: """Test compiling hourly statistics, with the sensor being partially unavailable.""" zero = dt_util.utcnow() hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added four, states = record_states_partially_unavailable( hass, zero, "sensor.test1", TEMPERATURE_SENSOR_ATTRIBUTES ) hist = history.get_significant_states( hass, zero, four, hass.states.async_entity_ids() ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) do_adhoc_statistics(hass, start=zero) wait_recording_done(hass) stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(21.1864406779661), "min": pytest.approx(10.0), "max": pytest.approx(25.0), "last_reset": None, "state": None, "sum": None, } ] } assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics, with one sensor being unavailable. sensor.test1 is unavailable and should not have statistics generated. sensor.test2 should have statistics generated.
def test_compile_hourly_statistics_unavailable( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, device_class, state_unit, value, ) -> None: """Test compiling hourly statistics, with one sensor being unavailable. sensor.test1 is unavailable and should not have statistics generated sensor.test2 should have statistics generated """ zero = dt_util.utcnow() hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added attributes = { "device_class": device_class, "state_class": "measurement", "unit_of_measurement": state_unit, } four, states = record_states_partially_unavailable( hass, zero, "sensor.test1", attributes ) with freeze_time(zero) as freezer: _, _states = record_states(hass, freezer, zero, "sensor.test2", attributes) states = {**states, **_states} hist = history.get_significant_states( hass, zero, four, hass.states.async_entity_ids() ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) do_adhoc_statistics(hass, start=four) wait_recording_done(hass) stats = statistics_during_period(hass, four, period="5minute") assert stats == { "sensor.test2": [ { "start": process_timestamp(four).timestamp(), "end": process_timestamp(four + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(value), "min": pytest.approx(value), "max": pytest.approx(value), "last_reset": None, "state": None, "sum": None, } ] } assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics throws.
def test_compile_hourly_statistics_fails( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture ) -> None: """Test compiling hourly statistics throws.""" zero = dt_util.utcnow() hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added with patch( "homeassistant.components.sensor.recorder.compile_statistics", side_effect=Exception, ): do_adhoc_statistics(hass, start=zero) wait_recording_done(hass) assert "Error while processing event StatisticsTask" in caplog.text
Test listing future statistic ids.
def test_list_statistic_ids( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, state_class, device_class, state_unit, display_unit, statistics_unit, unit_class, statistic_type, ) -> None: """Test listing future statistic ids.""" hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added attributes = { "device_class": device_class, "last_reset": 0, "state_class": state_class, "unit_of_measurement": state_unit, } hass.states.set("sensor.test1", 0, attributes=attributes) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": statistic_type == "mean", "has_sum": statistic_type == "sum", "name": None, "source": "recorder", "statistics_unit_of_measurement": statistics_unit, "unit_class": unit_class, }, ] for stat_type in ["mean", "sum", "dogs"]: statistic_ids = list_statistic_ids(hass, statistic_type=stat_type) if statistic_type == stat_type: assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": statistic_type == "mean", "has_sum": statistic_type == "sum", "name": None, "source": "recorder", "statistics_unit_of_measurement": statistics_unit, "unit_class": unit_class, }, ] else: assert statistic_ids == []
Test listing future statistic ids for unsupported sensor.
def test_list_statistic_ids_unsupported( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, _attributes, ) -> None: """Test listing future statistic ids for unsupported sensor.""" hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added attributes = dict(_attributes) hass.states.set("sensor.test1", 0, attributes=attributes) if "last_reset" in attributes: attributes.pop("unit_of_measurement") hass.states.set("last_reset.test2", 0, attributes=attributes) attributes = dict(_attributes) if "unit_of_measurement" in attributes: attributes["unit_of_measurement"] = "invalid" hass.states.set("sensor.test3", 0, attributes=attributes) attributes.pop("unit_of_measurement") hass.states.set("sensor.test4", 0, attributes=attributes) attributes = dict(_attributes) attributes["state_class"] = "invalid" hass.states.set("sensor.test5", 0, attributes=attributes) attributes.pop("state_class") hass.states.set("sensor.test6", 0, attributes=attributes)
Test compiling hourly statistics where units change from one hour to the next. This tests the case where the recorder cannot convert between the units.
def test_compile_hourly_statistics_changing_units_1( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, device_class, state_unit, state_unit2, unit_class, mean, min, max, ) -> None: """Test compiling hourly statistics where units change from one hour to the next. This tests the case where the recorder cannot convert between the units. """ zero = dt_util.utcnow() hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added attributes = { "device_class": device_class, "state_class": "measurement", "unit_of_measurement": state_unit, } with freeze_time(zero) as freezer: four, states = record_states(hass, freezer, zero, "sensor.test1", attributes) attributes["unit_of_measurement"] = state_unit2 four, _states = record_states( hass, freezer, zero + timedelta(minutes=5), "sensor.test1", attributes ) states["sensor.test1"] += _states["sensor.test1"] four, _states = record_states( hass, freezer, zero + timedelta(minutes=10), "sensor.test1", attributes ) states["sensor.test1"] += _states["sensor.test1"] hist = history.get_significant_states( hass, zero, four, hass.states.async_entity_ids() ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) do_adhoc_statistics(hass, start=zero) wait_recording_done(hass) assert "cannot be converted to the unit of previously" not in caplog.text statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": state_unit, "unit_class": unit_class, }, ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(mean), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, } ] } do_adhoc_statistics(hass, start=zero + timedelta(minutes=10)) wait_recording_done(hass) assert ( f"The unit of sensor.test1 ({state_unit2}) cannot be converted to the unit of " f"previously compiled statistics ({state_unit})" in caplog.text ) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": state_unit, "unit_class": unit_class, }, ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(mean), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, } ] } assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics where units change during an hour. This tests the behaviour when the sensor units are not supported by any unit converter.
def test_compile_hourly_statistics_changing_units_2(
    hass_recorder: Callable[..., HomeAssistant],
    caplog: pytest.LogCaptureFixture,
    device_class,
    state_unit,
    display_unit,
    statistics_unit,
    unit_class,
    mean,
    min,
    max,
) -> None:
    """Test compiling hourly statistics where units change during an hour.

    This tests the behaviour when the sensor units are not supported by any unit
    converter.
    """
    zero = dt_util.utcnow()
    hass = hass_recorder()
    setup_component(hass, "sensor", {})
    wait_recording_done(hass)  # Wait for the sensor recorder platform to be added
    attributes = {
        "device_class": device_class,
        "state_class": "measurement",
        "unit_of_measurement": state_unit,
    }
    with freeze_time(zero) as freezer:
        four, states = record_states(hass, freezer, zero, "sensor.test1", attributes)
        attributes["unit_of_measurement"] = "cats"
        four, _states = record_states(
            hass, freezer, zero + timedelta(minutes=5), "sensor.test1", attributes
        )
        states["sensor.test1"] += _states["sensor.test1"]
    hist = history.get_significant_states(
        hass, zero, four, hass.states.async_entity_ids()
    )
    assert_dict_of_states_equal_without_context_and_last_changed(states, hist)

    do_adhoc_statistics(hass, start=zero + timedelta(seconds=30 * 5))
    wait_recording_done(hass)
    assert "The unit of sensor.test1 is changing" in caplog.text
    assert "and matches the unit of already compiled statistics" not in caplog.text
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [
        {
            "statistic_id": "sensor.test1",
            "display_unit_of_measurement": "cats",
            "has_mean": True,
            "has_sum": False,
            "name": None,
            "source": "recorder",
            "statistics_unit_of_measurement": "cats",
            "unit_class": unit_class,
        },
    ]
    stats = statistics_during_period(hass, zero, period="5minute")
    assert stats == {}

    assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics where units change from one hour to the next. This tests the behaviour when the sensor units are not supported by any unit converter.
def test_compile_hourly_statistics_changing_units_3(
    hass_recorder: Callable[..., HomeAssistant],
    caplog: pytest.LogCaptureFixture,
    device_class,
    state_unit,
    display_unit,
    statistics_unit,
    unit_class,
    mean,
    min,
    max,
) -> None:
    """Test compiling hourly statistics where units change from one hour to the next.

    This tests the behaviour when the sensor units are not supported by any unit
    converter.
    """
    zero = dt_util.utcnow()
    hass = hass_recorder()
    setup_component(hass, "sensor", {})
    wait_recording_done(hass)  # Wait for the sensor recorder platform to be added
    attributes = {
        "device_class": device_class,
        "state_class": "measurement",
        "unit_of_measurement": state_unit,
    }
    with freeze_time(zero) as freezer:
        four, states = record_states(hass, freezer, zero, "sensor.test1", attributes)
        four, _states = record_states(
            hass, freezer, zero + timedelta(minutes=5), "sensor.test1", attributes
        )
        states["sensor.test1"] += _states["sensor.test1"]
        attributes["unit_of_measurement"] = "cats"
        four, _states = record_states(
            hass, freezer, zero + timedelta(minutes=10), "sensor.test1", attributes
        )
        states["sensor.test1"] += _states["sensor.test1"]
    hist = history.get_significant_states(
        hass, zero, four, hass.states.async_entity_ids()
    )
    assert_dict_of_states_equal_without_context_and_last_changed(states, hist)

    do_adhoc_statistics(hass, start=zero)
    wait_recording_done(hass)
    assert "does not match the unit of already compiled" not in caplog.text
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [
        {
            "statistic_id": "sensor.test1",
            "display_unit_of_measurement": display_unit,
            "has_mean": True,
            "has_sum": False,
            "name": None,
            "source": "recorder",
            "statistics_unit_of_measurement": statistics_unit,
            "unit_class": unit_class,
        },
    ]
    stats = statistics_during_period(hass, zero, period="5minute")
    assert stats == {
        "sensor.test1": [
            {
                "start": process_timestamp(zero).timestamp(),
                "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
                "mean": pytest.approx(mean),
                "min": pytest.approx(min),
                "max": pytest.approx(max),
                "last_reset": None,
                "state": None,
                "sum": None,
            }
        ]
    }

    do_adhoc_statistics(hass, start=zero + timedelta(minutes=10))
    wait_recording_done(hass)
    assert "The unit of sensor.test1 is changing" in caplog.text
    assert (
        f"matches the unit of already compiled statistics ({state_unit})"
        in caplog.text
    )
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [
        {
            "statistic_id": "sensor.test1",
            "display_unit_of_measurement": display_unit,
            "has_mean": True,
            "has_sum": False,
            "name": None,
            "source": "recorder",
            "statistics_unit_of_measurement": statistics_unit,
            "unit_class": unit_class,
        },
    ]
    stats = statistics_during_period(hass, zero, period="5minute")
    assert stats == {
        "sensor.test1": [
            {
                "start": process_timestamp(zero).timestamp(),
                "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
                "mean": pytest.approx(mean),
                "min": pytest.approx(min),
                "max": pytest.approx(max),
                "last_reset": None,
                "state": None,
                "sum": None,
            }
        ]
    }
    assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics where units change from one hour to the next. This tests the case where the recorder can convert between the units.
def test_compile_hourly_statistics_convert_units_1( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, state_unit_1, state_unit_2, unit_class, mean, min, max, factor, ) -> None: """Test compiling hourly statistics where units change from one hour to the next. This tests the case where the recorder can convert between the units. """ zero = dt_util.utcnow() hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added attributes = { "device_class": None, "state_class": "measurement", "unit_of_measurement": state_unit_1, } with freeze_time(zero) as freezer: four, states = record_states(hass, freezer, zero, "sensor.test1", attributes) four, _states = record_states( hass, freezer, zero + timedelta(minutes=5), "sensor.test1", attributes, seq=[0, 1, None], ) states["sensor.test1"] += _states["sensor.test1"] do_adhoc_statistics(hass, start=zero) wait_recording_done(hass) assert "does not match the unit of already compiled" not in caplog.text statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit_1, "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": state_unit_1, "unit_class": unit_class, }, ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(mean), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, } ] } attributes["unit_of_measurement"] = state_unit_2 with freeze_time(four) as freezer: four, _states = record_states( hass, freezer, zero + timedelta(minutes=10), "sensor.test1", attributes ) states["sensor.test1"] += _states["sensor.test1"] hist = history.get_significant_states( hass, zero, four, hass.states.async_entity_ids() ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) do_adhoc_statistics(hass, start=zero + timedelta(minutes=10)) wait_recording_done(hass) assert "The unit of sensor.test1 is changing" not in caplog.text assert ( f"matches the unit of already compiled statistics ({state_unit_1})" not in caplog.text ) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit_2, "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": state_unit_1, "unit_class": unit_class, }, ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(mean * factor), "min": pytest.approx(min * factor), "max": pytest.approx(max * factor), "last_reset": None, "state": None, "sum": None, }, { "start": process_timestamp(zero + timedelta(minutes=10)).timestamp(), "end": process_timestamp(zero + timedelta(minutes=15)).timestamp(), "mean": pytest.approx(mean), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, }, ] } assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics where units change from one hour to the next.
def test_compile_hourly_statistics_equivalent_units_1( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, device_class, state_unit, state_unit2, unit_class, unit_class2, mean, mean2, min, max, ) -> None: """Test compiling hourly statistics where units change from one hour to the next.""" zero = dt_util.utcnow() hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added attributes = { "device_class": device_class, "state_class": "measurement", "unit_of_measurement": state_unit, } with freeze_time(zero) as freezer: four, states = record_states(hass, freezer, zero, "sensor.test1", attributes) attributes["unit_of_measurement"] = state_unit2 four, _states = record_states( hass, freezer, zero + timedelta(minutes=5), "sensor.test1", attributes ) states["sensor.test1"] += _states["sensor.test1"] four, _states = record_states( hass, freezer, zero + timedelta(minutes=10), "sensor.test1", attributes ) states["sensor.test1"] += _states["sensor.test1"] hist = history.get_significant_states( hass, zero, four, hass.states.async_entity_ids() ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) do_adhoc_statistics(hass, start=zero) wait_recording_done(hass) assert "cannot be converted to the unit of previously" not in caplog.text statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": state_unit, "unit_class": unit_class, }, ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(mean), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, } ] } do_adhoc_statistics(hass, start=zero + timedelta(minutes=10)) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit2, "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": state_unit2, "unit_class": unit_class2, }, ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(mean), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, }, { "start": process_timestamp(zero + timedelta(minutes=10)).timestamp(), "end": process_timestamp(zero + timedelta(minutes=15)).timestamp(), "mean": pytest.approx(mean2), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, }, ] } assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics where units change during an hour.
def test_compile_hourly_statistics_equivalent_units_2( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, device_class, state_unit, state_unit2, unit_class, mean, min, max, ) -> None: """Test compiling hourly statistics where units change during an hour.""" zero = dt_util.utcnow() hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added attributes = { "device_class": device_class, "state_class": "measurement", "unit_of_measurement": state_unit, } with freeze_time(zero) as freezer: four, states = record_states(hass, freezer, zero, "sensor.test1", attributes) attributes["unit_of_measurement"] = state_unit2 four, _states = record_states( hass, freezer, zero + timedelta(minutes=5), "sensor.test1", attributes ) states["sensor.test1"] += _states["sensor.test1"] hist = history.get_significant_states( hass, zero, four, hass.states.async_entity_ids() ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) do_adhoc_statistics(hass, start=zero + timedelta(seconds=30 * 5)) wait_recording_done(hass) assert "The unit of sensor.test1 is changing" not in caplog.text assert "and matches the unit of already compiled statistics" not in caplog.text statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": state_unit, "unit_class": unit_class, }, ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp( zero + timedelta(seconds=30 * 5) ).timestamp(), "end": process_timestamp(zero + timedelta(seconds=30 * 15)).timestamp(), "mean": pytest.approx(mean), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, }, ] } assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics where device class changes from one hour to the next. Device class is ignored, meaning changing device class should not influence the statistics.
def test_compile_hourly_statistics_changing_device_class_1( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, device_class, state_unit, statistic_unit, unit_class, mean1, mean2, min, max, ) -> None: """Test compiling hourly statistics where device class changes from one hour to the next. Device class is ignored, meaning changing device class should not influence the statistics. """ zero = dt_util.utcnow() hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added # Record some states for an initial period, the entity has no device class attributes = { "state_class": "measurement", "unit_of_measurement": state_unit, } with freeze_time(zero) as freezer: four, states = record_states(hass, freezer, zero, "sensor.test1", attributes) do_adhoc_statistics(hass, start=zero) wait_recording_done(hass) assert "does not match the unit of already compiled" not in caplog.text statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": state_unit, "unit_class": unit_class, }, ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(mean1), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, } ] } # Update device class and record additional states in the original UoM attributes["device_class"] = device_class with freeze_time(zero) as freezer: four, _states = record_states( hass, freezer, zero + timedelta(minutes=5), "sensor.test1", attributes ) states["sensor.test1"] += _states["sensor.test1"] four, _states = record_states( hass, freezer, zero + timedelta(minutes=10), "sensor.test1", attributes ) states["sensor.test1"] += _states["sensor.test1"] hist = history.get_significant_states( hass, zero, four, hass.states.async_entity_ids() ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) # Run statistics again, additional statistics is generated do_adhoc_statistics(hass, start=zero + timedelta(minutes=10)) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": state_unit, "unit_class": unit_class, }, ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(mean1), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, }, { "start": process_timestamp(zero + timedelta(minutes=10)).timestamp(), "end": process_timestamp(zero + timedelta(minutes=15)).timestamp(), "mean": pytest.approx(mean2), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, }, ] } # Update device class and record additional states in a different UoM attributes["unit_of_measurement"] = statistic_unit with freeze_time(zero) as freezer: four, _states = record_states( hass, freezer, zero + timedelta(minutes=15), 
"sensor.test1", attributes ) states["sensor.test1"] += _states["sensor.test1"] four, _states = record_states( hass, freezer, zero + timedelta(minutes=20), "sensor.test1", attributes ) states["sensor.test1"] += _states["sensor.test1"] hist = history.get_significant_states( hass, zero, four, hass.states.async_entity_ids() ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) # Run statistics again, additional statistics is generated do_adhoc_statistics(hass, start=zero + timedelta(minutes=20)) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": state_unit, "unit_class": unit_class, }, ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(mean1), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, }, { "start": process_timestamp(zero + timedelta(minutes=10)).timestamp(), "end": process_timestamp(zero + timedelta(minutes=15)).timestamp(), "mean": pytest.approx(mean2), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, }, { "start": process_timestamp(zero + timedelta(minutes=20)).timestamp(), "end": process_timestamp(zero + timedelta(minutes=25)).timestamp(), "mean": pytest.approx(mean2), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, }, ] } assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics where device class changes from one hour to the next. Device class is ignored, meaning changing device class should not influence the statistics.
def test_compile_hourly_statistics_changing_device_class_2( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, device_class, state_unit, display_unit, statistic_unit, unit_class, mean, mean2, min, max, ) -> None: """Test compiling hourly statistics where device class changes from one hour to the next. Device class is ignored, meaning changing device class should not influence the statistics. """ zero = dt_util.utcnow() hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added # Record some states for an initial period, the entity has a device class attributes = { "device_class": device_class, "state_class": "measurement", "unit_of_measurement": state_unit, } with freeze_time(zero) as freezer: four, states = record_states(hass, freezer, zero, "sensor.test1", attributes) do_adhoc_statistics(hass, start=zero) wait_recording_done(hass) assert "does not match the unit of already compiled" not in caplog.text statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": statistic_unit, "unit_class": unit_class, }, ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(mean), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, } ] } # Remove device class and record additional states attributes.pop("device_class") with freeze_time(zero) as freezer: four, _states = record_states( hass, freezer, zero + timedelta(minutes=5), "sensor.test1", attributes ) states["sensor.test1"] += _states["sensor.test1"] four, _states = record_states( hass, freezer, zero + timedelta(minutes=10), "sensor.test1", attributes ) states["sensor.test1"] += _states["sensor.test1"] hist = history.get_significant_states( hass, zero, four, hass.states.async_entity_ids() ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) # Run statistics again, additional statistics is generated do_adhoc_statistics(hass, start=zero + timedelta(minutes=10)) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": statistic_unit, "unit_class": unit_class, }, ] stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), "mean": pytest.approx(mean), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, }, { "start": process_timestamp(zero + timedelta(minutes=10)).timestamp(), "end": process_timestamp(zero + timedelta(minutes=15)).timestamp(), "mean": pytest.approx(mean2), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, }, ] } assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics where state class changes.
def test_compile_hourly_statistics_changing_state_class( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture, device_class, state_unit, display_unit, statistics_unit, unit_class, mean, min, max, ) -> None: """Test compiling hourly statistics where state class changes.""" period0 = dt_util.utcnow() period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period0 + timedelta(minutes=10) hass = hass_recorder() setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added attributes_1 = { "device_class": device_class, "state_class": "measurement", "unit_of_measurement": state_unit, } attributes_2 = { "device_class": device_class, "state_class": "total_increasing", "unit_of_measurement": state_unit, } with freeze_time(period0) as freezer: four, states = record_states( hass, freezer, period0, "sensor.test1", attributes_1 ) do_adhoc_statistics(hass, start=period0) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": None, "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": None, "unit_class": unit_class, }, ] metadata = get_metadata(hass, statistic_ids={"sensor.test1"}) assert metadata == { "sensor.test1": ( 1, { "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistic_id": "sensor.test1", "unit_of_measurement": None, }, ) } # Add more states, with changed state class with freeze_time(period1) as freezer: four, _states = record_states( hass, freezer, period1, "sensor.test1", attributes_2 ) states["sensor.test1"] += _states["sensor.test1"] hist = history.get_significant_states( hass, period0, four, hass.states.async_entity_ids() ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) do_adhoc_statistics(hass, start=period1) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": None, "has_mean": False, "has_sum": True, "name": None, "source": "recorder", "statistics_unit_of_measurement": None, "unit_class": unit_class, }, ] metadata = get_metadata(hass, statistic_ids={"sensor.test1"}) assert metadata == { "sensor.test1": ( 1, { "has_mean": False, "has_sum": True, "name": None, "source": "recorder", "statistic_id": "sensor.test1", "unit_of_measurement": None, }, ) } stats = statistics_during_period(hass, period0, period="5minute") assert stats == { "sensor.test1": [ { "start": process_timestamp(period0).timestamp(), "end": process_timestamp(period0_end).timestamp(), "mean": pytest.approx(mean), "min": pytest.approx(min), "max": pytest.approx(max), "last_reset": None, "state": None, "sum": None, }, { "start": process_timestamp(period1).timestamp(), "end": process_timestamp(period1_end).timestamp(), "mean": None, "min": None, "max": None, "last_reset": None, "state": pytest.approx(30.0), "sum": pytest.approx(30.0), }, ] } assert "Error while processing event StatisticsTask" not in caplog.text
Test compiling hourly statistics + monthly and daily summary.
def test_compile_statistics_hourly_daily_monthly_summary( hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture ) -> None: """Test compiling hourly statistics + monthly and daily summary.""" zero = dt_util.utcnow() # August 31st, 23:00 local time zero = zero.replace( year=2021, month=9, day=1, hour=5, minute=0, second=0, microsecond=0 ) with freeze_time(zero): hass = hass_recorder() # Remove this after dropping the use of the hass_recorder fixture hass.config.set_time_zone("America/Regina") instance = get_instance(hass) setup_component(hass, "sensor", {}) wait_recording_done(hass) # Wait for the sensor recorder platform to be added attributes = { "device_class": None, "state_class": "measurement", "unit_of_measurement": "%", } sum_attributes = { "device_class": None, "state_class": "total", "unit_of_measurement": "EUR", } def _weighted_average(seq, i, last_state): total = 0 duration = 0 durations = [50, 200, 45] if i > 0: total += last_state * 5 duration += 5 for j, dur in enumerate(durations): total += seq[j] * dur duration += dur return total / duration def _min(seq, last_state): if last_state is None: return min(seq) return min([*seq, last_state]) def _max(seq, last_state): if last_state is None: return max(seq) return max([*seq, last_state]) def _sum(seq, last_state, last_sum): if last_state is None: return seq[-1] - seq[0] return last_sum[-1] + seq[-1] - last_state # Generate states for two hours states = { "sensor.test1": [], "sensor.test2": [], "sensor.test3": [], "sensor.test4": [], } expected_minima = {"sensor.test1": [], "sensor.test2": [], "sensor.test3": []} expected_maxima = {"sensor.test1": [], "sensor.test2": [], "sensor.test3": []} expected_averages = {"sensor.test1": [], "sensor.test2": [], "sensor.test3": []} expected_states = {"sensor.test4": []} expected_sums = {"sensor.test4": []} last_states = { "sensor.test1": None, "sensor.test2": None, "sensor.test3": None, "sensor.test4": None, } start = zero with freeze_time(start) as freezer: for i in range(24): seq = [-10, 15, 30] # test1 has same value in every period four, _states = record_states( hass, freezer, start, "sensor.test1", attributes, seq ) states["sensor.test1"] += _states["sensor.test1"] last_state = last_states["sensor.test1"] expected_minima["sensor.test1"].append(_min(seq, last_state)) expected_maxima["sensor.test1"].append(_max(seq, last_state)) expected_averages["sensor.test1"].append( _weighted_average(seq, i, last_state) ) last_states["sensor.test1"] = seq[-1] # test2 values change: min/max at the last state seq = [-10 * (i + 1), 15 * (i + 1), 30 * (i + 1)] four, _states = record_states( hass, freezer, start, "sensor.test2", attributes, seq ) states["sensor.test2"] += _states["sensor.test2"] last_state = last_states["sensor.test2"] expected_minima["sensor.test2"].append(_min(seq, last_state)) expected_maxima["sensor.test2"].append(_max(seq, last_state)) expected_averages["sensor.test2"].append( _weighted_average(seq, i, last_state) ) last_states["sensor.test2"] = seq[-1] # test3 values change: min/max at the first state seq = [-10 * (23 - i + 1), 15 * (23 - i + 1), 30 * (23 - i + 1)] four, _states = record_states( hass, freezer, start, "sensor.test3", attributes, seq ) states["sensor.test3"] += _states["sensor.test3"] last_state = last_states["sensor.test3"] expected_minima["sensor.test3"].append(_min(seq, last_state)) expected_maxima["sensor.test3"].append(_max(seq, last_state)) expected_averages["sensor.test3"].append( _weighted_average(seq, i, last_state) ) 
last_states["sensor.test3"] = seq[-1] # test4 values grow seq = [i, i + 0.5, i + 0.75] start_meter = start for j in range(len(seq)): _states = record_meter_state( hass, freezer, start_meter, "sensor.test4", sum_attributes, seq[j : j + 1], ) start_meter += timedelta(minutes=1) states["sensor.test4"] += _states["sensor.test4"] last_state = last_states["sensor.test4"] expected_states["sensor.test4"].append(seq[-1]) expected_sums["sensor.test4"].append( _sum(seq, last_state, expected_sums["sensor.test4"]) ) last_states["sensor.test4"] = seq[-1] start += timedelta(minutes=5) hist = history.get_significant_states( hass, zero - timedelta.resolution, four, hass.states.async_entity_ids(), significant_changes_only=False, ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) wait_recording_done(hass) # Generate 5-minute statistics for two hours start = zero for _ in range(24): do_adhoc_statistics(hass, start=start) wait_recording_done(hass) start += timedelta(minutes=5) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": "%", "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": "%", "unit_class": "unitless", }, { "statistic_id": "sensor.test2", "display_unit_of_measurement": "%", "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": "%", "unit_class": "unitless", }, { "statistic_id": "sensor.test3", "display_unit_of_measurement": "%", "has_mean": True, "has_sum": False, "name": None, "source": "recorder", "statistics_unit_of_measurement": "%", "unit_class": "unitless", }, { "statistic_id": "sensor.test4", "display_unit_of_measurement": "EUR", "has_mean": False, "has_sum": True, "name": None, "source": "recorder", "statistics_unit_of_measurement": "EUR", "unit_class": None, }, ] # Adjust the inserted statistics sum_adjustment = -10 sum_adjustement_start = zero + timedelta(minutes=65) for i in range(13, 24): expected_sums["sensor.test4"][i] += sum_adjustment instance.async_adjust_statistics( "sensor.test4", sum_adjustement_start, sum_adjustment, "EUR" ) wait_recording_done(hass) stats = statistics_during_period(hass, zero, period="5minute") expected_stats = { "sensor.test1": [], "sensor.test2": [], "sensor.test3": [], "sensor.test4": [], } start = zero end = zero + timedelta(minutes=5) for i in range(24): for entity_id in [ "sensor.test1", "sensor.test2", "sensor.test3", "sensor.test4", ]: expected_average = ( expected_averages[entity_id][i] if entity_id in expected_averages else None ) expected_minimum = ( expected_minima[entity_id][i] if entity_id in expected_minima else None ) expected_maximum = ( expected_maxima[entity_id][i] if entity_id in expected_maxima else None ) expected_state = ( expected_states[entity_id][i] if entity_id in expected_states else None ) expected_sum = ( expected_sums[entity_id][i] if entity_id in expected_sums else None ) expected_stats[entity_id].append( { "start": process_timestamp(start).timestamp(), "end": process_timestamp(end).timestamp(), "mean": pytest.approx(expected_average), "min": pytest.approx(expected_minimum), "max": pytest.approx(expected_maximum), "last_reset": None, "state": expected_state, "sum": expected_sum, } ) start += timedelta(minutes=5) end += timedelta(minutes=5) assert stats == expected_stats stats = statistics_during_period(hass, zero, period="hour") expected_stats = { "sensor.test1": [], "sensor.test2": [], "sensor.test3": [], 
"sensor.test4": [], } start = zero end = zero + timedelta(hours=1) for i in range(2): for entity_id in [ "sensor.test1", "sensor.test2", "sensor.test3", "sensor.test4", ]: expected_average = ( mean(expected_averages[entity_id][i * 12 : (i + 1) * 12]) if entity_id in expected_averages else None ) expected_minimum = ( min(expected_minima[entity_id][i * 12 : (i + 1) * 12]) if entity_id in expected_minima else None ) expected_maximum = ( max(expected_maxima[entity_id][i * 12 : (i + 1) * 12]) if entity_id in expected_maxima else None ) expected_state = ( expected_states[entity_id][(i + 1) * 12 - 1] if entity_id in expected_states else None ) expected_sum = ( expected_sums[entity_id][(i + 1) * 12 - 1] if entity_id in expected_sums else None ) expected_stats[entity_id].append( { "start": process_timestamp(start).timestamp(), "end": process_timestamp(end).timestamp(), "mean": pytest.approx(expected_average), "min": pytest.approx(expected_minimum), "max": pytest.approx(expected_maximum), "last_reset": None, "state": expected_state, "sum": expected_sum, } ) start += timedelta(hours=1) end += timedelta(hours=1) assert stats == expected_stats stats = statistics_during_period(hass, zero, period="day") expected_stats = { "sensor.test1": [], "sensor.test2": [], "sensor.test3": [], "sensor.test4": [], } start = dt_util.parse_datetime("2021-08-31T06:00:00+00:00") end = start + timedelta(days=1) for i in range(2): for entity_id in [ "sensor.test1", "sensor.test2", "sensor.test3", "sensor.test4", ]: expected_average = ( mean(expected_averages[entity_id][i * 12 : (i + 1) * 12]) if entity_id in expected_averages else None ) expected_minimum = ( min(expected_minima[entity_id][i * 12 : (i + 1) * 12]) if entity_id in expected_minima else None ) expected_maximum = ( max(expected_maxima[entity_id][i * 12 : (i + 1) * 12]) if entity_id in expected_maxima else None ) expected_state = ( expected_states[entity_id][(i + 1) * 12 - 1] if entity_id in expected_states else None ) expected_sum = ( expected_sums[entity_id][(i + 1) * 12 - 1] if entity_id in expected_sums else None ) expected_stats[entity_id].append( { "start": process_timestamp(start).timestamp(), "end": process_timestamp(end).timestamp(), "mean": pytest.approx(expected_average), "min": pytest.approx(expected_minimum), "max": pytest.approx(expected_maximum), "last_reset": None, "state": expected_state, "sum": expected_sum, } ) start += timedelta(days=1) end += timedelta(days=1) assert stats == expected_stats stats = statistics_during_period(hass, zero, period="month") expected_stats = { "sensor.test1": [], "sensor.test2": [], "sensor.test3": [], "sensor.test4": [], } start = dt_util.parse_datetime("2021-08-01T06:00:00+00:00") end = dt_util.parse_datetime("2021-09-01T06:00:00+00:00") for i in range(2): for entity_id in [ "sensor.test1", "sensor.test2", "sensor.test3", "sensor.test4", ]: expected_average = ( mean(expected_averages[entity_id][i * 12 : (i + 1) * 12]) if entity_id in expected_averages else None ) expected_minimum = ( min(expected_minima[entity_id][i * 12 : (i + 1) * 12]) if entity_id in expected_minima else None ) expected_maximum = ( max(expected_maxima[entity_id][i * 12 : (i + 1) * 12]) if entity_id in expected_maxima else None ) expected_state = ( expected_states[entity_id][(i + 1) * 12 - 1] if entity_id in expected_states else None ) expected_sum = ( expected_sums[entity_id][(i + 1) * 12 - 1] if entity_id in expected_sums else None ) expected_stats[entity_id].append( { "start": process_timestamp(start).timestamp(), "end": 
process_timestamp(end).timestamp(), "mean": pytest.approx(expected_average), "min": pytest.approx(expected_minimum), "max": pytest.approx(expected_maximum), "last_reset": None, "state": expected_state, "sum": expected_sum, } ) start = (start + timedelta(days=31)).replace(day=1) end = (end + timedelta(days=31)).replace(day=1) assert stats == expected_stats assert "Error while processing event StatisticsTask" not in caplog.text
Record some test states. We inject a bunch of state updates for measurement sensors.
def record_states( hass: HomeAssistant, freezer: FrozenDateTimeFactory, zero: datetime, entity_id: str, attributes, seq=None, ): """Record some test states. We inject a bunch of state updates for measurement sensors. """ attributes = dict(attributes) if seq is None: seq = [-10, 15, 30] def set_state(entity_id, state, **kwargs): """Set the state.""" hass.states.set(entity_id, state, **kwargs) wait_recording_done(hass) return hass.states.get(entity_id) one = zero + timedelta(seconds=1 * 5) two = one + timedelta(seconds=10 * 5) three = two + timedelta(seconds=40 * 5) four = three + timedelta(seconds=10 * 5) states = {entity_id: []} freezer.move_to(one) states[entity_id].append(set_state(entity_id, str(seq[0]), attributes=attributes)) freezer.move_to(two) states[entity_id].append(set_state(entity_id, str(seq[1]), attributes=attributes)) freezer.move_to(three) states[entity_id].append(set_state(entity_id, str(seq[2]), attributes=attributes)) return four, states
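For orientation, here is the time weighting these timestamps imply over a single 5-minute statistics window, written out as a standalone sketch (not part of the suite); the durations match those hard-coded in _weighted_average above.

# Illustration only, not part of the suite: with the default seq of
# [-10, 15, 30], the states are held for 50 s, 200 s and 45 s within the
# 0-300 s window (the last state is cut off at the window end).
seq = [-10, 15, 30]
durations = [50, 200, 45]
weighted_mean = sum(s * d for s, d in zip(seq, durations)) / sum(durations)
assert round(weighted_mean, 4) == 13.0508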
Record some test states. We inject a bunch of state updates for meter sensors.
def record_meter_states(
    hass: HomeAssistant,
    freezer: FrozenDateTimeFactory,
    zero: datetime,
    entity_id: str,
    _attributes,
    seq,
):
    """Record some test states.

    We inject a bunch of state updates for meter sensors.
    """

    def set_state(entity_id, state, **kwargs):
        """Set the state."""
        hass.states.set(entity_id, state, **kwargs)
        return hass.states.get(entity_id)

    one = zero + timedelta(seconds=15 * 5)  # 00:01:15
    two = one + timedelta(seconds=30 * 5)  # 00:03:45
    three = two + timedelta(seconds=15 * 5)  # 00:05:00
    four = three + timedelta(seconds=15 * 5)  # 00:06:15
    five = four + timedelta(seconds=30 * 5)  # 00:08:45
    six = five + timedelta(seconds=15 * 5)  # 00:10:00
    seven = six + timedelta(seconds=15 * 5)  # 00:11:15
    eight = seven + timedelta(seconds=30 * 5)  # 00:13:45

    attributes = dict(_attributes)
    if "last_reset" in _attributes:
        attributes["last_reset"] = zero.isoformat()

    states = {entity_id: []}
    freezer.move_to(zero)
    states[entity_id].append(set_state(entity_id, seq[0], attributes=attributes))

    freezer.move_to(one)
    states[entity_id].append(set_state(entity_id, seq[1], attributes=attributes))

    freezer.move_to(two)
    states[entity_id].append(set_state(entity_id, seq[2], attributes=attributes))

    freezer.move_to(three)
    states[entity_id].append(set_state(entity_id, seq[3], attributes=attributes))

    attributes = dict(_attributes)
    if "last_reset" in _attributes:
        attributes["last_reset"] = four.isoformat()

    freezer.move_to(four)
    states[entity_id].append(set_state(entity_id, seq[4], attributes=attributes))

    freezer.move_to(five)
    states[entity_id].append(set_state(entity_id, seq[5], attributes=attributes))

    freezer.move_to(six)
    states[entity_id].append(set_state(entity_id, seq[6], attributes=attributes))

    freezer.move_to(seven)
    states[entity_id].append(set_state(entity_id, seq[7], attributes=attributes))

    freezer.move_to(eight)
    states[entity_id].append(set_state(entity_id, seq[8], attributes=attributes))

    return four, eight, states
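The helper consumes exactly nine readings and moves last_reset from zero to four (00:06:15) before the fifth one is recorded. A hypothetical call from inside a test that already has hass, freezer and zero in scope; the entity id, attribute values and readings here are made up for illustration.

# Hypothetical usage sketch; attribute values are illustrative only.
sum_attributes = {
    "device_class": "energy",
    "state_class": "total",
    "unit_of_measurement": "kWh",
    "last_reset": None,
}
four, eight, states = record_meter_states(
    hass,
    freezer,
    zero,
    "sensor.test_energy",
    sum_attributes,
    [10, 20, 30, 40, 50, 60, 70, 80, 90],
)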
Record a test state. We inject a state update for a meter sensor.
def record_meter_state(
    hass: HomeAssistant,
    freezer: FrozenDateTimeFactory,
    zero: datetime,
    entity_id: str,
    attributes,
    seq,
):
    """Record a test state.

    We inject a state update for a meter sensor.
    """

    def set_state(entity_id, state, **kwargs):
        """Set the state."""
        hass.states.set(entity_id, state, **kwargs)
        wait_recording_done(hass)
        return hass.states.get(entity_id)

    states = {entity_id: []}
    freezer.move_to(zero)
    states[entity_id].append(set_state(entity_id, seq[0], attributes=attributes))

    return states
Record some test states. We inject a bunch of state updates for temperature sensors.
def record_states_partially_unavailable(hass, zero, entity_id, attributes):
    """Record some test states.

    We inject a bunch of state updates for temperature sensors.
    """

    def set_state(entity_id, state, **kwargs):
        """Set the state."""
        hass.states.set(entity_id, state, **kwargs)
        wait_recording_done(hass)
        return hass.states.get(entity_id)

    one = zero + timedelta(seconds=1 * 5)
    two = one + timedelta(seconds=15 * 5)
    three = two + timedelta(seconds=30 * 5)
    four = three + timedelta(seconds=15 * 5)

    states = {entity_id: []}
    with freeze_time(one) as freezer:
        states[entity_id].append(set_state(entity_id, "10", attributes=attributes))

        freezer.move_to(two)
        states[entity_id].append(set_state(entity_id, "25", attributes=attributes))

        freezer.move_to(three)
        states[entity_id].append(
            set_state(entity_id, STATE_UNAVAILABLE, attributes=attributes)
        )

    return four, states
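The 21.1864... mean asserted in test_compile_hourly_statistics_partially_unavailable follows from these timestamps, assuming the recorder filters out unavailable states before time-weighting, so the last numeric state counts until the window end; a worked sketch:

# Illustration only: "10" is held from 5 s to 80 s (75 s), then "25" counts
# from 80 s to the 300 s window end (220 s), because the unavailable state
# recorded at 230 s is dropped entirely.
weighted_mean = (10 * 75 + 25 * 220) / (75 + 220)
assert abs(weighted_mean - 21.1864406779661) < 1e-10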
Disable the creation of the database issue.
def disable_db_issue_creation(): """Disable the creation of the database issue.""" with patch( "homeassistant.components.recorder.util._async_create_mariadb_range_index_regression_issue" ): yield
Test compile missing statistics.
def test_compile_missing_statistics( freezer: FrozenDateTimeFactory, recorder_db_url: str, tmp_path: Path ) -> None: """Test compile missing statistics.""" if recorder_db_url == "sqlite://": # On-disk database because we need to stop and start hass # and have it persist. recorder_db_url = "sqlite:///" + str(tmp_path / "pytest.db") config = { "db_url": recorder_db_url, } three_days_ago = datetime(2021, 1, 1, 0, 0, 0, tzinfo=dt_util.UTC) start_time = three_days_ago + timedelta(days=3) freezer.move_to(three_days_ago) with get_test_home_assistant() as hass: hass.set_state(CoreState.not_running) recorder_helper.async_initialize_recorder(hass) setup_component(hass, "sensor", {}) setup_component(hass, "recorder", {"recorder": config}) hass.start() wait_recording_done(hass) wait_recording_done(hass) hass.states.set("sensor.test1", "0", POWER_SENSOR_ATTRIBUTES) wait_recording_done(hass) two_days_ago = three_days_ago + timedelta(days=1) freezer.move_to(two_days_ago) do_adhoc_statistics(hass, start=two_days_ago) wait_recording_done(hass) with session_scope(hass=hass, read_only=True) as session: latest = get_latest_short_term_statistics_with_session( hass, session, {"sensor.test1"}, {"state", "sum"} ) latest_stat = latest["sensor.test1"][0] assert latest_stat["start"] == 1609545600.0 assert latest_stat["end"] == 1609545600.0 + 300 count = 1 past_time = two_days_ago while past_time <= start_time: freezer.move_to(past_time) hass.states.set("sensor.test1", str(count), POWER_SENSOR_ATTRIBUTES) past_time += timedelta(minutes=5) count += 1 wait_recording_done(hass) states = get_significant_states( hass, three_days_ago, past_time, ["sensor.test1"] ) assert len(states["sensor.test1"]) == 577 hass.stop() freezer.move_to(start_time) with get_test_home_assistant() as hass: hass.set_state(CoreState.not_running) recorder_helper.async_initialize_recorder(hass) setup_component(hass, "sensor", {}) hass.states.set("sensor.test1", "0", POWER_SENSOR_ATTRIBUTES) setup_component(hass, "recorder", {"recorder": config}) hass.start() wait_recording_done(hass) wait_recording_done(hass) with session_scope(hass=hass, read_only=True) as session: latest = get_latest_short_term_statistics_with_session( hass, session, {"sensor.test1"}, {"state", "sum", "max", "mean", "min"} ) latest_stat = latest["sensor.test1"][0] assert latest_stat["start"] == 1609718100.0 assert latest_stat["end"] == 1609718100.0 + 300 assert latest_stat["mean"] == 576.0 assert latest_stat["min"] == 575.0 assert latest_stat["max"] == 576.0 stats = statistics_during_period( hass, two_days_ago, start_time, units={"energy": "kWh"}, statistic_ids={"sensor.test1"}, period="hour", types={"mean"}, ) # Make sure we have 48 hours of statistics assert len(stats["sensor.test1"]) == 48 # Make sure the last mean is 570.5 assert stats["sensor.test1"][-1]["mean"] == 570.5 hass.stop()
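The epoch timestamps asserted above decode as follows; a standalone sketch using only the standard library:

# Illustration only: 1609545600.0 is the start of the first short-term
# statistics period (2021-01-02 00:00 UTC, i.e. two_days_ago), and
# 1609718100.0 starts the last 5-minute period before start_time
# (2021-01-03 23:55 UTC).
from datetime import datetime, timezone

assert datetime(2021, 1, 2, tzinfo=timezone.utc).timestamp() == 1609545600.0
assert datetime(2021, 1, 3, 23, 55, tzinfo=timezone.utc).timestamp() == 1609718100.0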
Auto mock bluetooth.
def mock_bluetooth(enable_bluetooth): """Auto mock bluetooth."""
Auto mock bluetooth.
def mock_bluetooth(enable_bluetooth): """Auto mock bluetooth."""
Create a mock config entry.
def config_entry_fixture() -> MockConfigEntry: """Create a mock config entry.""" return MockConfigEntry(domain=DOMAIN, title="Sentry")
Create hass config fixture.
def config_fixture() -> dict[str, Any]: """Create hass config fixture.""" return {DOMAIN: {"dsn": "http://[email protected]/1"}}
Override async_setup_entry.
def mock_setup_entry() -> Generator[AsyncMock, None, None]: """Override async_setup_entry.""" with patch( "homeassistant.components.seventeentrack.async_setup_entry", return_value=True ) as mock_setup_entry: yield mock_setup_entry
Return the default mocked config entry.
def mock_config_entry() -> MockConfigEntry: """Return the default mocked config entry.""" return MockConfigEntry( domain="seventeentrack", data=VALID_CONFIG, options=VALID_OPTIONS, unique_id=ACCOUNT_ID, )
Return the default mocked config entry.
def mock_config_entry_with_default_options() -> MockConfigEntry: """Return the default mocked config entry.""" return MockConfigEntry( domain="seventeentrack", data=VALID_CONFIG, options={ CONF_SHOW_ARCHIVED: DEFAULT_SHOW_ARCHIVED, CONF_SHOW_DELIVERED: DEFAULT_SHOW_DELIVERED, }, unique_id=ACCOUNT_ID, )
Build a fixture for the 17Track API.
def mock_seventeentrack(): """Build a fixture for the 17Track API.""" mock_seventeentrack_api = AsyncMock() with ( patch( "homeassistant.components.seventeentrack.SeventeenTrackClient", return_value=mock_seventeentrack_api, ), patch( "homeassistant.components.seventeentrack.config_flow.SeventeenTrackClient", return_value=mock_seventeentrack_api, ) as mock_seventeentrack_api, ): mock_seventeentrack_api.return_value.profile.account_id = ACCOUNT_ID mock_seventeentrack_api.return_value.profile.login.return_value = True mock_seventeentrack_api.return_value.profile.packages.return_value = [] mock_seventeentrack_api.return_value.profile.summary.return_value = ( DEFAULT_SUMMARY ) yield mock_seventeentrack_api
Build a Package of the 17Track API.
def get_package( tracking_number: str = "456", destination_country: int = 206, friendly_name: str | None = "friendly name 1", info_text: str = "info text 1", location: str = "location 1", timestamp: str = "2020-08-10 10:32", origin_country: int = 206, package_type: int = 2, status: int = 0, tz: str = "UTC", ): """Build a Package of the 17Track API.""" return Package( tracking_number=tracking_number, destination_country=destination_country, friendly_name=friendly_name, info_text=info_text, location=location, timestamp=timestamp, origin_country=origin_country, package_type=package_type, status=status, tz=tz, )
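A hypothetical way a test might combine this builder with the mock_seventeentrack fixture above; the tracking number and status code are illustrative, not taken from this file.

# Hypothetical usage sketch: hand one package to the mocked profile so the
# integration has something to track.
package = get_package(tracking_number="789", status=10)
mock_seventeentrack.return_value.profile.packages.return_value = [package]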
Override async_setup_entry.
def mock_setup_entry() -> Generator[AsyncMock, None, None]: """Override async_setup_entry.""" with patch( "homeassistant.components.sfr_box.async_setup_entry", return_value=True ) as mock_setup_entry: yield mock_setup_entry
Create and register mock config entry.
def get_config_entry(hass: HomeAssistant) -> ConfigEntry: """Create and register mock config entry.""" config_entry = MockConfigEntry( domain=DOMAIN, source=SOURCE_USER, data={CONF_HOST: "192.168.0.1"}, unique_id="e4:5d:51:00:11:22", options={}, entry_id="123456", ) config_entry.add_to_hass(hass) return config_entry
Create and register mock config entry.
def get_config_entry_with_auth(hass: HomeAssistant) -> ConfigEntry: """Create and register mock config entry.""" config_entry_with_auth = MockConfigEntry( domain=DOMAIN, source=SOURCE_USER, data={ CONF_HOST: "192.168.0.1", CONF_USERNAME: "admin", CONF_PASSWORD: "password", }, unique_id="e4:5d:51:00:11:23", options={}, entry_id="1234567", ) config_entry_with_auth.add_to_hass(hass) return config_entry_with_auth
Fixture for SFRBox.dsl_get_info.
def dsl_get_info() -> Generator[DslInfo, None, None]: """Fixture for SFRBox.dsl_get_info.""" dsl_info = DslInfo(**json.loads(load_fixture("dsl_getInfo.json", DOMAIN))) with patch( "homeassistant.components.sfr_box.coordinator.SFRBox.dsl_get_info", return_value=dsl_info, ): yield dsl_info
Fixture for SFRBox.ftth_get_info.
def ftth_get_info() -> Generator[FtthInfo, None, None]: """Fixture for SFRBox.ftth_get_info.""" info = FtthInfo(**json.loads(load_fixture("ftth_getInfo.json", DOMAIN))) with patch( "homeassistant.components.sfr_box.coordinator.SFRBox.ftth_get_info", return_value=info, ): yield info
Fixture for SFRBox.system_get_info.
def system_get_info() -> Generator[SystemInfo, None, None]: """Fixture for SFRBox.system_get_info.""" info = SystemInfo(**json.loads(load_fixture("system_getInfo.json", DOMAIN))) with patch( "homeassistant.components.sfr_box.coordinator.SFRBox.system_get_info", return_value=info, ): yield info
Fixture for SFRBox.wan_get_info.
def wan_get_info() -> Generator[WanInfo, None, None]: """Fixture for SFRBox.wan_get_info.""" info = WanInfo(**json.loads(load_fixture("wan_getInfo.json", DOMAIN))) with patch( "homeassistant.components.sfr_box.coordinator.SFRBox.wan_get_info", return_value=info, ): yield info
Override PLATFORMS.
def override_platforms() -> Generator[None, None, None]: """Override PLATFORMS.""" with patch("homeassistant.components.sfr_box.PLATFORMS", [Platform.BINARY_SENSOR]): yield
Override PLATFORMS_WITH_AUTH.
def override_platforms() -> Generator[None, None, None]: """Override PLATFORMS_WITH_AUTH.""" with ( patch( "homeassistant.components.sfr_box.PLATFORMS_WITH_AUTH", [Platform.BUTTON] ), patch("homeassistant.components.sfr_box.coordinator.SFRBox.authenticate"), ): yield
def override_platforms() -> Generator[None, None, None]:
    """Override PLATFORMS."""
    with patch("homeassistant.components.sfr_box.PLATFORMS", []):
        yield

def override_platforms() -> Generator[None, None, None]:
    """Override PLATFORMS."""
    with patch("homeassistant.components.sfr_box.PLATFORMS", []):
        yield

def override_platforms() -> Generator[None, None, None]:
    """Override PLATFORMS."""
    with patch("homeassistant.components.sfr_box.PLATFORMS", [Platform.SENSOR]):
        yield

def mock_light_set_state(
    turn="on",
    mode="color",
    red=45,
    green=55,
    blue=65,
    white=70,
    gain=19,
    temp=4050,
    brightness=50,
    effect=0,
    transition=0,
):
    """Mock light block set_state."""
    return {
        "ison": turn == "on",
        "mode": mode,
        "red": red,
        "green": green,
        "blue": blue,
        "white": white,
        "gain": gain,
        "temp": temp,
        "brightness": brightness,
        "effect": effect,
        "transition": transition,
    }

def mock_white_light_set_state(
    turn="on",
    temp=4050,
    gain=19,
    brightness=128,
    transition=0,
):
    """Mock white light block set_state."""
    return {
        "ison": turn == "on",
        "mode": "white",
        "gain": gain,
        "temp": temp,
        "brightness": brightness,
        "transition": transition,
    }

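# Hedged usage sketch: one way a test might wire the two helpers above into a
# mocked block device's set_state call. The mock_block_device object and the
# block index are assumptions; neither is defined in this file.
def configure_white_light(mock_block_device, block_id: int = 0) -> None:
    """Illustrative only: have set_state report a dimmed warm-white light."""
    state = mock_white_light_set_state(brightness=70, temp=3000)
    mock_block_device.blocks[block_id].set_state.return_value = state
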
def mock_coap():
    """Mock out coap."""
    with patch(
        "homeassistant.components.shelly.utils.COAP",
        return_value=Mock(
            initialize=AsyncMock(),
            close=Mock(),
        ),
    ):
        yield

def mock_ws_server():
    """Mock out ws_server."""
    with patch("homeassistant.components.shelly.utils.get_ws_context"):
        yield

def device_reg(hass: HomeAssistant):
    """Return an empty, loaded, registry."""
    return mock_device_registry(hass)

def calls(hass: HomeAssistant):
    """Track calls to a mock service."""
    return async_mock_service(hass, "test", "automation")

def events(hass: HomeAssistant):
    """Yield caught shelly_click events."""
    return async_capture_events(hass, EVENT_SHELLY_CLICK)

def _mock_rpc_device(version: str | None = None):
    """Mock rpc (Gen2, Websocket) device."""
    device = Mock(
        spec=RpcDevice,
        config=MOCK_CONFIG,
        event={},
        shelly=MOCK_SHELLY_RPC,
        version=version or "1.0.0",
        hostname="test-host",
        status=MOCK_STATUS_RPC,
        firmware_version="some fw string",
        initialized=True,
    )
    type(device).name = PropertyMock(return_value="Test name")
    return device

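# Hedged sketch of how a fixture could expose the mock above by patching
# device creation. The aioshelly patch target is an assumption about that
# library's layout, not something taken from this file.
@pytest.fixture
def rpc_device_sketch():
    """Illustrative only: yield a mocked RPC device to tests."""
    device = _mock_rpc_device("1.0.0")
    with patch(
        "aioshelly.rpc_device.RpcDevice.create",  # assumed patch target
        new=AsyncMock(return_value=device),
    ):
        yield device
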
def mock_bluetooth(enable_bluetooth):
    """Auto mock bluetooth."""

def test_get_release_url(
    gen: int, model: str, beta: bool, expected: str | None
) -> None:
    """Test get_release_url() with a device without a release note URL."""
    result = get_release_url(gen, model, beta)
    assert result is expected

def mutate_rpc_device_status(
    monkeypatch: pytest.MonkeyPatch,
    mock_rpc_device: Mock,
    top_level_key: str,
    key: str,
    value: Any,
) -> None:
    """Mutate status for rpc device."""
    new_status = deepcopy(mock_rpc_device.status)
    new_status[top_level_key][key] = value
    monkeypatch.setattr(mock_rpc_device, "status", new_status)

def inject_rpc_device_event(
    monkeypatch: pytest.MonkeyPatch,
    mock_rpc_device: Mock,
    event: Mapping[str, list[dict[str, Any]] | float],
) -> None:
    """Inject event for rpc device."""
    monkeypatch.setattr(mock_rpc_device, "event", event)
    mock_rpc_device.mock_event()

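# Hedged usage sketch: simulating a button press through the helper above.
# The payload keys mirror Shelly RPC notifications, but the exact values here
# are assumptions for illustration.
def inject_single_push(monkeypatch: pytest.MonkeyPatch, mock_rpc_device: Mock) -> None:
    """Illustrative only: simulate a single push on input 0."""
    inject_rpc_device_event(
        monkeypatch,
        mock_rpc_device,
        {
            "events": [{"id": 0, "event": "single_push", "ts": 1668522399.2}],
            "ts": 1668522399.2,
        },
    )
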
def register_entity(
    hass: HomeAssistant,
    domain: str,
    object_id: str,
    unique_id: str,
    config_entry: ConfigEntry | None = None,
    capabilities: Mapping[str, Any] | None = None,
) -> str:
    """Register enabled entity, return entity_id."""
    entity_registry = async_get(hass)
    entity_registry.async_get_or_create(
        domain,
        DOMAIN,
        f"{MOCK_MAC}-{unique_id}",
        suggested_object_id=object_id,
        disabled_by=None,
        config_entry=config_entry,
        capabilities=capabilities,
    )
    return f"{domain}.{object_id}"

def get_entity(
    hass: HomeAssistant,
    domain: str,
    unique_id: str,
) -> str | None:
    """Get Shelly entity."""
    entity_registry = async_get(hass)
    return entity_registry.async_get_entity_id(
        domain, DOMAIN, f"{MOCK_MAC}-{unique_id}"
    )

def get_entity_state(hass: HomeAssistant, entity_id: str) -> str:
    """Return entity state."""
    entity = hass.states.get(entity_id)
    assert entity
    return entity.state

def register_device(device_reg: DeviceRegistry, config_entry: ConfigEntry) -> None:
    """Register Shelly device."""
    device_reg.async_get_or_create(
        config_entry_id=config_entry.entry_id,
        connections={(CONNECTION_NETWORK_MAC, format_mac(MOCK_MAC))},
    )

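# Hedged usage sketch: pre-registering the device plus one entity before a
# config entry is set up. The "sensor" domain, object id, and unique-id
# suffix are illustrative values, not taken from a real test.
def register_temperature_sensor(
    hass: HomeAssistant, config_entry: ConfigEntry, device_reg: DeviceRegistry
) -> str:
    """Illustrative only: register a Shelly device and a temperature entity."""
    register_device(device_reg, config_entry)
    return register_entity(
        hass, "sensor", "test_name_temperature", "temperature", config_entry
    )
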
def mock_process_creator(error: bool = False):
    """Mock a coroutine that creates a process when yielded."""

    async def communicate() -> tuple[bytes, bytes]:
        """Mock a coroutine that runs a process when yielded.

        Returns a tuple of (stdout, stderr).
        """
        return b"I am stdout", b"I am stderr"

    mock_process = MagicMock()
    mock_process.communicate = communicate
    mock_process.returncode = int(error)
    return mock_process

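# Hedged usage sketch: patching asyncio's subprocess factory so a test can
# await the fake process above. patch() substitutes an AsyncMock when the
# target is a coroutine function, so awaiting the patched call yields the
# mock process. The function name is an assumption for illustration.
import asyncio


async def run_fake_process_sketch() -> None:
    """Illustrative only: run the mocked process and check its output."""
    with patch(
        "asyncio.create_subprocess_shell",
        return_value=mock_process_creator(error=False),
    ):
        process = await asyncio.create_subprocess_shell("echo hello")
        stdout, _ = await process.communicate()
        assert stdout == b"I am stdout"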