Log a warning once if a sensor with state_class_total has a negative value. | def warn_negative(hass: HomeAssistant, entity_id: str, state: State) -> None:
"""Log a warning once if a sensor with state_class_total has a negative value."""
if WARN_NEGATIVE not in hass.data:
hass.data[WARN_NEGATIVE] = set()
if entity_id not in hass.data[WARN_NEGATIVE]:
hass.data[WARN_NEGATIVE].add(entity_id)
entity_info = entity_sources(hass).get(entity_id)
domain = entity_info["domain"] if entity_info else None
_LOGGER.warning(
(
"Entity %s %shas state class total_increasing, but its state is "
"negative. Triggered by state %s with last_updated set to %s. Please %s"
),
entity_id,
f"from integration {domain} " if domain else "",
state.state,
state.last_updated.isoformat(),
_suggest_report_issue(hass, entity_id),
) |
Test if a total_increasing sensor has been reset. | def reset_detected(
hass: HomeAssistant,
entity_id: str,
fstate: float,
previous_fstate: float | None,
state: State,
) -> bool:
"""Test if a total_increasing sensor has been reset."""
if previous_fstate is None:
return False
if 0.9 * previous_fstate <= fstate < previous_fstate:
warn_dip(hass, entity_id, state, previous_fstate)
if fstate < 0:
warn_negative(hass, entity_id, state)
raise HomeAssistantError
return fstate < 0.9 * previous_fstate |
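A standalone illustration of the thresholds used above, with plain floats rather than the Home Assistant helper (which needs a hass instance): a dip of less than 10% from the previous value only triggers a warning, a drop of 10% or more counts as a reset, and a negative value is logged and rejected.
previous_fstate = 100.0
for fstate in (95.0, 80.0, -1.0):
    dip_only = 0.9 * previous_fstate <= fstate < previous_fstate
    is_reset = fstate < 0.9 * previous_fstate
    print(f"{previous_fstate} -> {fstate}: dip warning={dip_only}, reset={is_reset}")
# 100.0 -> 95.0: dip warning=True, reset=False
# 100.0 -> 80.0: dip warning=False, reset=True
# 100.0 -> -1.0: dip warning=False, reset=True (the helper raises HomeAssistantError here)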
Prepare a dict with wanted statistics for entities. | def _wanted_statistics(sensor_states: list[State]) -> dict[str, set[str]]:
"""Prepare a dict with wanted statistics for entities."""
return {
state.entity_id: DEFAULT_STATISTICS[state.attributes[ATTR_STATE_CLASS]]
for state in sensor_states
} |
Parse last_reset and convert it to UTC. | def _last_reset_as_utc_isoformat(last_reset_s: Any, entity_id: str) -> str | None:
"""Parse last_reset and convert it to UTC."""
if last_reset_s is None:
return None
if isinstance(last_reset_s, str):
last_reset = dt_util.parse_datetime(last_reset_s)
else:
last_reset = None
if last_reset is None:
_LOGGER.warning(
"Ignoring invalid last reset '%s' for %s", last_reset_s, entity_id
)
return None
return dt_util.as_utc(last_reset).isoformat() |
Convert a timestamp to ISO format or return None. | def _timestamp_to_isoformat_or_none(timestamp: float | None) -> str | None:
"""Convert a timestamp to ISO format or return None."""
if timestamp is None:
return None
return dt_util.utc_from_timestamp(timestamp).isoformat() |
Compile statistics for all entities during start-end. | def compile_statistics( # noqa: C901
hass: HomeAssistant,
session: Session,
start: datetime.datetime,
end: datetime.datetime,
) -> statistics.PlatformCompiledStatistics:
"""Compile statistics for all entities during start-end."""
result: list[StatisticResult] = []
sensor_states = _get_sensor_states(hass)
wanted_statistics = _wanted_statistics(sensor_states)
# Get history between start and end
entities_full_history = [
i.entity_id for i in sensor_states if "sum" in wanted_statistics[i.entity_id]
]
history_list: dict[str, list[State]] = {}
if entities_full_history:
history_list = history.get_full_significant_states_with_session(
hass,
session,
start - datetime.timedelta.resolution,
end,
entity_ids=entities_full_history,
significant_changes_only=False,
)
entities_significant_history = [
i.entity_id
for i in sensor_states
if "sum" not in wanted_statistics[i.entity_id]
]
if entities_significant_history:
_history_list = history.get_full_significant_states_with_session(
hass,
session,
start - datetime.timedelta.resolution,
end,
entity_ids=entities_significant_history,
)
history_list = {**history_list, **_history_list}
entities_with_float_states: dict[str, list[tuple[float, State]]] = {}
for _state in sensor_states:
entity_id = _state.entity_id
# If there are no recent state changes, the sensor's state may already be pruned
# from the recorder. Get the state from the state machine instead.
if not (entity_history := history_list.get(entity_id, [_state])):
continue
if not (float_states := _entity_history_to_float_and_state(entity_history)):
continue
entities_with_float_states[entity_id] = float_states
# Only lookup metadata for entities that have valid float states
# since it will result in cache misses for statistic_ids
# that are not in the metadata table and we are not working
# with them anyway.
old_metadatas = statistics.get_metadata_with_session(
get_instance(hass), session, statistic_ids=set(entities_with_float_states)
)
to_process: list[tuple[str, str | None, str, list[tuple[float, State]]]] = []
to_query: set[str] = set()
for _state in sensor_states:
entity_id = _state.entity_id
if not (maybe_float_states := entities_with_float_states.get(entity_id)):
continue
statistics_unit, valid_float_states = _normalize_states(
hass,
old_metadatas,
maybe_float_states,
entity_id,
)
if not valid_float_states:
continue
state_class: str = _state.attributes[ATTR_STATE_CLASS]
to_process.append((entity_id, statistics_unit, state_class, valid_float_states))
if "sum" in wanted_statistics[entity_id]:
to_query.add(entity_id)
last_stats = statistics.get_latest_short_term_statistics_with_session(
hass, session, to_query, {"last_reset", "state", "sum"}, metadata=old_metadatas
)
for ( # pylint: disable=too-many-nested-blocks
entity_id,
statistics_unit,
state_class,
valid_float_states,
) in to_process:
# Check metadata
if old_metadata := old_metadatas.get(entity_id):
if not _equivalent_units(
{old_metadata[1]["unit_of_measurement"], statistics_unit}
):
if WARN_UNSTABLE_UNIT not in hass.data:
hass.data[WARN_UNSTABLE_UNIT] = set()
if entity_id not in hass.data[WARN_UNSTABLE_UNIT]:
hass.data[WARN_UNSTABLE_UNIT].add(entity_id)
_LOGGER.warning(
(
"The unit of %s (%s) cannot be converted to the unit of"
" previously compiled statistics (%s). Generation of long"
" term statistics will be suppressed unless the unit"
" changes back to %s or a compatible unit. Go to %s to fix"
" this"
),
entity_id,
statistics_unit,
old_metadata[1]["unit_of_measurement"],
old_metadata[1]["unit_of_measurement"],
LINK_DEV_STATISTICS,
)
continue
# Set meta data
meta: StatisticMetaData = {
"has_mean": "mean" in wanted_statistics[entity_id],
"has_sum": "sum" in wanted_statistics[entity_id],
"name": None,
"source": RECORDER_DOMAIN,
"statistic_id": entity_id,
"unit_of_measurement": statistics_unit,
}
# Make calculations
stat: StatisticData = {"start": start}
if "max" in wanted_statistics[entity_id]:
stat["max"] = max(
*itertools.islice(zip(*valid_float_states, strict=False), 1)
)
if "min" in wanted_statistics[entity_id]:
stat["min"] = min(
*itertools.islice(zip(*valid_float_states, strict=False), 1)
)
if "mean" in wanted_statistics[entity_id]:
stat["mean"] = _time_weighted_average(valid_float_states, start, end)
if "sum" in wanted_statistics[entity_id]:
last_reset = old_last_reset = None
new_state = old_state = None
_sum = 0.0
if entity_id in last_stats:
# We have compiled history for this sensor before,
# use that as a starting point.
last_stat = last_stats[entity_id][0]
last_reset = _timestamp_to_isoformat_or_none(last_stat["last_reset"])
old_last_reset = last_reset
# If there are no previous values and has_sum
# was previously false there will be no last_stat
# for state or sum
new_state = old_state = last_stat.get("state")
_sum = last_stat.get("sum") or 0.0
for fstate, state in valid_float_states:
reset = False
if (
state_class != SensorStateClass.TOTAL_INCREASING
and (
last_reset := _last_reset_as_utc_isoformat(
state.attributes.get("last_reset"), entity_id
)
)
!= old_last_reset
and last_reset is not None
):
if old_state is None:
_LOGGER.info(
(
"Compiling initial sum statistics for %s, zero point"
" set to %s"
),
entity_id,
fstate,
)
else:
_LOGGER.info(
(
"Detected new cycle for %s, last_reset set to %s (old"
" last_reset %s)"
),
entity_id,
last_reset,
old_last_reset,
)
reset = True
elif old_state is None and last_reset is None:
reset = True
_LOGGER.info(
"Compiling initial sum statistics for %s, zero point set to %s",
entity_id,
fstate,
)
elif state_class == SensorStateClass.TOTAL_INCREASING:
try:
if old_state is None or reset_detected(
hass, entity_id, fstate, new_state, state
):
reset = True
_LOGGER.info(
(
"Detected new cycle for %s, value dropped from %s"
" to %s, triggered by state with last_updated set"
" to %s"
),
entity_id,
new_state,
fstate,
state.last_updated.isoformat(),
)
except HomeAssistantError:
continue
if reset:
# The sensor has been reset, update the sum
if old_state is not None and new_state is not None:
_sum += new_state - old_state
# ..and update the starting point
new_state = fstate
old_last_reset = last_reset
# Force a new cycle for an existing sensor to start at 0
if old_state is not None:
old_state = 0.0
else:
old_state = new_state
else:
new_state = fstate
if new_state is None or old_state is None:
# No valid updates
continue
# Update the sum with the last state
_sum += new_state - old_state
if last_reset is not None:
stat["last_reset"] = dt_util.parse_datetime(last_reset)
stat["sum"] = _sum
stat["state"] = new_state
result.append({"meta": meta, "stat": stat})
return statistics.PlatformCompiledStatistics(result, old_metadatas) |
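A minimal, standalone sketch (an assumed helper, not part of the recorder code above) of the sum accumulation rule applied to a total_increasing sensor: the first reading becomes the zero point, a drop below 90% of the previous reading is treated as a meter reset that starts a new cycle counted from zero, and the running sum is the accumulated consumption across all cycles.
def accumulate_total_increasing(readings: list[float]) -> float:
    """Standalone sketch of the sum accumulation rule for total_increasing."""
    total = 0.0
    old_state: float | None = None  # zero point of the current cycle
    new_state: float | None = None  # last reading seen
    for value in readings:
        if old_state is None:
            # First reading becomes the zero point of the initial cycle.
            new_state = old_state = value
            continue
        if new_state is not None and value < 0.9 * new_state:
            # Reset detected: close the old cycle, start a new one from zero.
            total += new_state - old_state
            old_state = 0.0
        new_state = value
    if new_state is not None and old_state is not None:
        total += new_state - old_state
    return total

# 5.0 -> 7.5 gives 2.5 before the reset, then 1.0 -> 3.0 counted from zero
# gives 3.0 after it, so the compiled sum is 5.5.
assert accumulate_total_increasing([5.0, 7.5, 1.0, 3.0]) == 5.5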
Return all or filtered statistic_ids and meta data. | def list_statistic_ids(
hass: HomeAssistant,
statistic_ids: list[str] | tuple[str] | None = None,
statistic_type: str | None = None,
) -> dict:
"""Return all or filtered statistic_ids and meta data."""
entities = _get_sensor_states(hass)
result: dict[str, StatisticMetaData] = {}
for state in entities:
state_class = state.attributes[ATTR_STATE_CLASS]
state_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
provided_statistics = DEFAULT_STATISTICS[state_class]
if statistic_type is not None and statistic_type not in provided_statistics:
continue
if statistic_ids is not None and state.entity_id not in statistic_ids:
continue
if (
"sum" in provided_statistics
and ATTR_LAST_RESET not in state.attributes
and state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT
):
continue
result[state.entity_id] = {
"has_mean": "mean" in provided_statistics,
"has_sum": "sum" in provided_statistics,
"name": None,
"source": RECORDER_DOMAIN,
"statistic_id": state.entity_id,
"unit_of_measurement": state_unit,
}
return result |
Validate statistics. | def validate_statistics(
hass: HomeAssistant,
) -> dict[str, list[statistics.ValidationIssue]]:
"""Validate statistics."""
validation_result = defaultdict(list)
sensor_states = hass.states.all(DOMAIN)
metadatas = statistics.get_metadata(hass, statistic_source=RECORDER_DOMAIN)
sensor_entity_ids = {i.entity_id for i in sensor_states}
sensor_statistic_ids = set(metadatas)
instance = get_instance(hass)
for state in sensor_states:
entity_id = state.entity_id
state_class = try_parse_enum(
SensorStateClass, state.attributes.get(ATTR_STATE_CLASS)
)
state_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
if metadata := metadatas.get(entity_id):
if not instance.entity_filter(state.entity_id):
# Sensor was previously recorded, but no longer is
validation_result[entity_id].append(
statistics.ValidationIssue(
"entity_no_longer_recorded",
{"statistic_id": entity_id},
)
)
if state_class is None:
# Sensor no longer has a valid state class
validation_result[entity_id].append(
statistics.ValidationIssue(
"unsupported_state_class",
{"statistic_id": entity_id, "state_class": state_class},
)
)
metadata_unit = metadata[1]["unit_of_measurement"]
converter = statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER.get(metadata_unit)
if not converter:
if not _equivalent_units({state_unit, metadata_unit}):
# The unit has changed, and it's not possible to convert
validation_result[entity_id].append(
statistics.ValidationIssue(
"units_changed",
{
"statistic_id": entity_id,
"state_unit": state_unit,
"metadata_unit": metadata_unit,
"supported_unit": metadata_unit,
},
)
)
elif state_unit not in converter.VALID_UNITS:
# The state unit can't be converted to the unit in metadata
valid_units = (unit or "<None>" for unit in converter.VALID_UNITS)
valid_units_str = ", ".join(sorted(valid_units))
validation_result[entity_id].append(
statistics.ValidationIssue(
"units_changed",
{
"statistic_id": entity_id,
"state_unit": state_unit,
"metadata_unit": metadata_unit,
"supported_unit": valid_units_str,
},
)
)
elif state_class is not None:
if not instance.entity_filter(state.entity_id):
# Sensor is not recorded
validation_result[entity_id].append(
statistics.ValidationIssue(
"entity_not_recorded",
{"statistic_id": entity_id},
)
)
for statistic_id in sensor_statistic_ids - sensor_entity_ids:
if split_entity_id(statistic_id)[0] != DOMAIN:
continue
# There is no sensor matching the statistics_id
validation_result[statistic_id].append(
statistics.ValidationIssue(
"no_state",
{
"statistic_id": statistic_id,
},
)
)
return validation_result |
Test if state significantly changed. | def async_check_significant_change(
hass: HomeAssistant,
old_state: str,
old_attrs: dict,
new_state: str,
new_attrs: dict,
**kwargs: Any,
) -> bool | None:
"""Test if state significantly changed."""
if (device_class := new_attrs.get(ATTR_DEVICE_CLASS)) is None:
return None
absolute_change: float | None = None
percentage_change: float | None = None
if device_class == SensorDeviceClass.TEMPERATURE:
if new_attrs.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.FAHRENHEIT:
absolute_change = 1.0
else:
absolute_change = 0.5
if device_class in (SensorDeviceClass.BATTERY, SensorDeviceClass.HUMIDITY):
absolute_change = 1.0
if device_class in (
SensorDeviceClass.AQI,
SensorDeviceClass.CO,
SensorDeviceClass.CO2,
SensorDeviceClass.PM25,
SensorDeviceClass.PM10,
SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS,
):
absolute_change = 1.0
percentage_change = 2.0
if not check_valid_float(new_state):
# New state is invalid, don't report it
return False
if not check_valid_float(old_state):
# Old state was invalid, we should report again
return True
if absolute_change is not None and percentage_change is not None:
return _absolute_and_relative_change(
float(old_state), float(new_state), absolute_change, percentage_change
)
if absolute_change is not None:
return check_absolute_change(
float(old_state), float(new_state), absolute_change
)
return None |
Set up the sensor websocket API. | def async_setup(hass: HomeAssistant) -> None:
"""Set up the sensor websocket API."""
websocket_api.async_register_command(hass, ws_device_class_units)
websocket_api.async_register_command(hass, ws_numeric_device_classes) |
Return supported units for a device class. | def ws_device_class_units(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
"""Return supported units for a device class."""
device_class = msg["device_class"]
convertible_units = []
if device_class in UNIT_CONVERTERS and device_class in DEVICE_CLASS_UNITS:
convertible_units = sorted(
DEVICE_CLASS_UNITS[device_class],
key=lambda s: str.casefold(str(s)),
)
connection.send_result(msg["id"], {"units": convertible_units}) |
Return numeric sensor device classes. | def ws_numeric_device_classes(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
"""Return numeric sensor device classes."""
numeric_device_classes = set(SensorDeviceClass) - NON_NUMERIC_DEVICE_CLASSES
connection.send_result(
msg["id"], {"numeric_device_classes": list(numeric_device_classes)}
) |
Return true if the sensor must be numeric. | def _numeric_state_expected(
device_class: SensorDeviceClass | None,
state_class: SensorStateClass | str | None,
native_unit_of_measurement: str | None,
suggested_display_precision: int | None,
) -> bool:
"""Return true if the sensor must be numeric."""
# Note: the order of the checks needs to be kept aligned
# with the checks in `state` property.
if device_class in NON_NUMERIC_DEVICE_CLASSES:
return False
if (
state_class is not None
or native_unit_of_measurement is not None
or suggested_display_precision is not None
):
return True
# Sensors with custom device classes will have the device class
# converted to None and are not considered numeric
return device_class is not None |
Update the suggested_unit_of_measurement according to the unit system. | def async_update_suggested_units(hass: HomeAssistant) -> None:
"""Update the suggested_unit_of_measurement according to the unit system."""
registry = er.async_get(hass)
for entry in registry.entities.values():
if entry.domain != DOMAIN:
continue
sensor_private_options = dict(entry.options.get(f"{DOMAIN}.private", {}))
sensor_private_options["refresh_initial_entity_options"] = True
registry.async_update_entity_options(
entry.entity_id,
f"{DOMAIN}.private",
sensor_private_options,
) |
Return the display precision. | def _display_precision(hass: HomeAssistant, entity_id: str) -> int | None:
"""Return the display precision."""
if not (entry := er.async_get(hass).async_get(entity_id)) or not (
sensor_options := entry.options.get(DOMAIN)
):
return None
if (display_precision := sensor_options.get("display_precision")) is not None:
return cast(int, display_precision)
return sensor_options.get("suggested_display_precision") |
Return the state rounded for presentation. | def async_rounded_state(hass: HomeAssistant, entity_id: str, state: State) -> str:
"""Return the state rounded for presentation."""
value = state.state
if (precision := _display_precision(hass, entity_id)) is None:
return value
with suppress(TypeError, ValueError):
numerical_value = float(value)
value = f"{numerical_value:z.{precision}f}"
return value |
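The "z" option in the format spec used above is the Python 3.11+ behavior from PEP 682: it coerces a negative-zero result to positive zero after rounding. A quick standalone check:
precision = 2
print(f"{-0.0004:.{precision}f}")   # -0.00
print(f"{-0.0004:z.{precision}f}")  # 0.00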
Convert a device key to an entity key. | def device_key_to_bluetooth_entity_key(
device_key: DeviceKey,
) -> PassiveBluetoothEntityKey:
"""Convert a device key to an entity key."""
return PassiveBluetoothEntityKey(device_key.key, device_key.device_id) |
Convert a sensor update to a bluetooth data update. | def sensor_update_to_bluetooth_data_update(
sensor_update: SensorUpdate,
) -> PassiveBluetoothDataUpdate:
"""Convert a sensor update to a bluetooth data update."""
return PassiveBluetoothDataUpdate(
devices={
device_id: sensor_device_info_to_hass_device_info(device_info)
for device_id, device_info in sensor_update.devices.items()
},
entity_descriptions={
device_key_to_bluetooth_entity_key(device_key): SENSOR_DESCRIPTIONS[
(description.device_class, description.native_unit_of_measurement)
]
for device_key, description in sensor_update.entity_descriptions.items()
if description.device_class and description.native_unit_of_measurement
},
entity_data={
device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
for device_key, sensor_values in sensor_update.entity_values.items()
},
entity_names={
device_key_to_bluetooth_entity_key(device_key): sensor_values.name
for device_key, sensor_values in sensor_update.entity_values.items()
},
) |
Convert a device key to an entity key. | def _device_key_to_bluetooth_entity_key(
device_key: DeviceKey,
) -> PassiveBluetoothEntityKey:
"""Convert a device key to an entity key."""
return PassiveBluetoothEntityKey(device_key.key, device_key.device_id) |
Convert a sensor update to a bluetooth data update. | def sensor_update_to_bluetooth_data_update(
sensor_update: SensorUpdate,
) -> PassiveBluetoothDataUpdate:
"""Convert a sensor update to a bluetooth data update."""
return PassiveBluetoothDataUpdate(
devices={
device_id: sensor_device_info_to_hass_device_info(device_info)
for device_id, device_info in sensor_update.devices.items()
},
entity_descriptions={
_device_key_to_bluetooth_entity_key(device_key): SENSOR_DESCRIPTIONS[
(description.device_class, description.native_unit_of_measurement)
]
for device_key, description in sensor_update.entity_descriptions.items()
if description.device_class and description.native_unit_of_measurement
},
entity_data={
_device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
for device_key, sensor_values in sensor_update.entity_values.items()
},
entity_names={
_device_key_to_bluetooth_entity_key(device_key): sensor_values.name
for device_key, sensor_values in sensor_update.entity_values.items()
},
) |
Process a Sentry event before sending it to Sentry. | def process_before_send(
hass: HomeAssistant,
options: MappingProxyType[str, Any],
channel: str,
huuid: str,
system_info: dict[str, bool | str],
custom_components: dict[str, Integration],
event: dict[str, Any],
hint: dict[str, Any],
):
"""Process a Sentry event before sending it to Sentry."""
# Filter out handled events by default
if (
"tags" in event
and event["tags"].get("handled", "no") == "yes"
and not options.get(CONF_EVENT_HANDLED)
):
return None
# Additional tags to add to the event
additional_tags = {
"channel": channel,
"installation_type": system_info["installation_type"],
"uuid": huuid,
}
# Find all integrations in use and filter out "auth", because it
# triggers security rules that hide all data.
integrations = [
integration
for integration in hass.config.components
if integration != "auth" and "." not in integration
]
# Add additional tags based on what caused the event.
if (platform := entity_platform.current_platform.get()) is not None:
# This event happened in a platform
additional_tags["custom_component"] = "no"
additional_tags["integration"] = platform.platform_name
additional_tags["platform"] = platform.domain
elif "logger" in event:
# Logger event, try to get integration information from the logger name.
matches = LOGGER_INFO_REGEX.findall(event["logger"])
if matches:
group1, group2, group3, group4 = matches[0]
# Handle the "homeassistant." package differently
if group1 == "homeassistant" and group2 and group3:
if group2 == "components":
# This logger is from a component
additional_tags["custom_component"] = "no"
additional_tags["integration"] = group3
if group4 and group4 in ENTITY_COMPONENTS:
additional_tags["platform"] = group4
else:
# Not a component, could be helper, or something else.
additional_tags[group2] = group3
else:
# Not the "homeassistant" package, this third-party
if not options.get(CONF_EVENT_THIRD_PARTY_PACKAGES):
return None
additional_tags["package"] = group1
# If this event is caused by an integration, add a tag if this
# integration is custom or not.
if (
"integration" in additional_tags
and additional_tags["integration"] in custom_components
):
if not options.get(CONF_EVENT_CUSTOM_COMPONENTS):
return None
additional_tags["custom_component"] = "yes"
# Update event with the additional tags
event.setdefault("tags", {}).update(additional_tags)
# Set user context to the installation UUID
event.setdefault("user", {}).update({"id": huuid})
# Update event data with Home Assistant Context
event.setdefault("contexts", {}).update(
{
"Home Assistant": {
"channel": channel,
"custom_components": "\n".join(sorted(custom_components)),
"integrations": "\n".join(sorted(integrations)),
**system_info,
},
}
)
return event |
Set up the available PM sensors. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the available PM sensors."""
try:
coll = pm.PMDataCollector(
config.get(CONF_SERIAL_DEVICE), pm.SUPPORTED_SENSORS[config.get(CONF_BRAND)]
)
except KeyError:
_LOGGER.error(
"Brand %s not supported\n supported brands: %s",
config.get(CONF_BRAND),
pm.SUPPORTED_SENSORS.keys(),
)
return
except OSError as err:
_LOGGER.error(
"Could not open serial connection to %s (%s)",
config.get(CONF_SERIAL_DEVICE),
err,
)
return
dev = []
for pmname in coll.supported_values():
if config.get(CONF_NAME) is not None:
name = f"{config.get(CONF_NAME)} PM{pmname}"
else:
name = f"PM{pmname}"
dev.append(ParticulateMatterSensor(coll, name, pmname))
add_entities(dev) |
Set up the Sesame platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Sesame platform."""
api_key = config.get(CONF_API_KEY)
add_entities(
[SesameDevice(sesame) for sesame in pysesame2.get_sesames(api_key)],
update_before_add=True,
) |
Remove the sensor entities for the given packages. | def remove_packages(hass: HomeAssistant, account_id: str, packages: set[str]) -> None:
"""Remove the sensor entities for the given packages."""
reg = er.async_get(hass)
for package in packages:
entity_id = reg.async_get_entity_id(
"sensor",
"seventeentrack",
UNIQUE_ID_TEMPLATE.format(account_id, package),
)
if entity_id:
reg.async_remove(entity_id) |
Notify when package is delivered. | def notify_delivered(hass: HomeAssistant, friendly_name: str, tracking_number: str):
"""Notify when package is delivered."""
LOGGER.debug("Package delivered: %s", tracking_number)
identification = friendly_name if friendly_name else tracking_number
message = NOTIFICATION_DELIVERED_MESSAGE.format(identification, tracking_number)
title = NOTIFICATION_DELIVERED_TITLE.format(identification)
notification_id = NOTIFICATION_DELIVERED_TITLE.format(tracking_number)
persistent_notification.create(
hass, message, title=title, notification_id=notification_id
) |
Catch SFR errors. | def with_error_wrapping(
func: Callable[Concatenate[SFRBoxButton, _P], Awaitable[_T]],
) -> Callable[Concatenate[SFRBoxButton, _P], Coroutine[Any, Any, _T]]:
"""Catch SFR errors."""
@wraps(func)
async def wrapper(
self: SFRBoxButton,
*args: _P.args,
**kwargs: _P.kwargs,
) -> _T:
"""Catch SFRBoxError errors and raise HomeAssistantError."""
try:
return await func(self, *args, **kwargs)
except SFRBoxError as err:
raise HomeAssistantError(err) from err
return wrapper |
Migrate button unique IDs. | def async_migrate_unique_ids(
coordinator: ShellyRpcCoordinator | ShellyBlockCoordinator,
entity_entry: er.RegistryEntry,
) -> dict[str, Any] | None:
"""Migrate button unique IDs."""
if not entity_entry.entity_id.startswith("button"):
return None
device_name = slugify(coordinator.device.name)
for key in ("reboot", "self_test", "mute", "unmute"):
old_unique_id = f"{device_name}_{key}"
if entity_entry.unique_id == old_unique_id:
new_unique_id = f"{coordinator.mac}_{key}"
LOGGER.debug(
"Migrating unique_id for %s entity from [%s] to [%s]",
entity_entry.entity_id,
old_unique_id,
new_unique_id,
)
return {
"new_unique_id": entity_entry.unique_id.replace(
old_unique_id, new_unique_id
)
}
return None |
Set up online climate devices. | def async_setup_climate_entities(
async_add_entities: AddEntitiesCallback,
coordinator: ShellyBlockCoordinator,
) -> None:
"""Set up online climate devices."""
device_block: Block | None = None
sensor_block: Block | None = None
assert coordinator.device.blocks
for block in coordinator.device.blocks:
if block.type == "device":
device_block = block
if hasattr(block, "targetTemp"):
sensor_block = block
if sensor_block and device_block:
LOGGER.debug("Setup online climate device %s", coordinator.name)
async_add_entities(
[BlockSleepingClimate(coordinator, sensor_block, device_block)]
) |
Restore sleeping climate devices. | def async_restore_climate_entities(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
coordinator: ShellyBlockCoordinator,
) -> None:
"""Restore sleeping climate devices."""
ent_reg = er_async_get(hass)
entries = async_entries_for_config_entry(ent_reg, config_entry.entry_id)
for entry in entries:
if entry.domain != CLIMATE_DOMAIN:
continue
LOGGER.debug("Setup sleeping climate device %s", coordinator.name)
LOGGER.debug("Found entry %s [%s]", entry.original_name, entry.domain)
async_add_entities([BlockSleepingClimate(coordinator, None, None, entry)])
break |
Set up entities for RPC device. | def async_setup_rpc_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up entities for RPC device."""
coordinator = get_entry_data(hass)[config_entry.entry_id].rpc
assert coordinator
climate_key_ids = get_rpc_key_ids(coordinator.device.status, "thermostat")
climate_ids = []
for id_ in climate_key_ids:
climate_ids.append(id_)
# There are three configuration scenarios for WallDisplay:
# - relay mode (no thermostat)
# - thermostat mode using the internal relay as an actuator
# - thermostat mode using an external (from another device) relay as
# an actuator
if is_rpc_thermostat_internal_actuator(coordinator.device.status):
# Wall Display relay is used as the thermostat actuator,
# we need to remove a switch entity
unique_id = f"{coordinator.mac}-switch:{id_}"
async_remove_shelly_entity(hass, "switch", unique_id)
if not climate_ids:
return
async_add_entities(RpcClimate(coordinator, id_) for id_ in climate_ids) |
Return Shelly entry data for a given config entry. | def get_entry_data(hass: HomeAssistant) -> dict[str, ShellyEntryData]:
"""Return Shelly entry data for a given config entry."""
return cast(dict[str, ShellyEntryData], hass.data[DOMAIN][DATA_CONFIG_ENTRY]) |
Get a Shelly block device coordinator for the given device id. | def get_block_coordinator_by_device_id(
hass: HomeAssistant, device_id: str
) -> ShellyBlockCoordinator | None:
"""Get a Shelly block device coordinator for the given device id."""
dev_reg = dr_async_get(hass)
if device := dev_reg.async_get(device_id):
for config_entry in device.config_entries:
if not (entry_data := get_entry_data(hass).get(config_entry)):
continue
if coordinator := entry_data.block:
return coordinator
return None |
Get a Shelly RPC device coordinator for the given device id. | def get_rpc_coordinator_by_device_id(
hass: HomeAssistant, device_id: str
) -> ShellyRpcCoordinator | None:
"""Get a Shelly RPC device coordinator for the given device id."""
dev_reg = dr_async_get(hass)
if device := dev_reg.async_get(device_id):
for config_entry in device.config_entries:
if not (entry_data := get_entry_data(hass).get(config_entry)):
continue
if coordinator := entry_data.rpc:
return coordinator
return None |
Set up cover for device. | def async_setup_block_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up cover for device."""
coordinator = get_entry_data(hass)[config_entry.entry_id].block
assert coordinator and coordinator.device.blocks
blocks = [block for block in coordinator.device.blocks if block.type == "roller"]
if not blocks:
return
async_add_entities(BlockShellyCover(coordinator, block) for block in blocks) |
Set up entities for RPC device. | def async_setup_rpc_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up entities for RPC device."""
coordinator = get_entry_data(hass)[config_entry.entry_id].rpc
assert coordinator
cover_key_ids = get_rpc_key_ids(coordinator.device.status, "cover")
if not cover_key_ids:
return
async_add_entities(RpcShellyCover(coordinator, id_) for id_ in cover_key_ids) |
Add trigger to triggers list. | def append_input_triggers(
triggers: list[dict[str, str]],
input_triggers: list[tuple[str, str]],
device_id: str,
) -> None:
"""Add trigger to triggers list."""
for trigger, subtype in input_triggers:
triggers.append(
{
CONF_PLATFORM: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_TYPE: trigger,
CONF_SUBTYPE: subtype,
}
) |
Set up entities for attributes. | def async_setup_entry_attribute_entities(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
sensors: Mapping[tuple[str, str], BlockEntityDescription],
sensor_class: Callable,
) -> None:
"""Set up entities for attributes."""
coordinator = get_entry_data(hass)[config_entry.entry_id].block
assert coordinator
if coordinator.device.initialized:
async_setup_block_attribute_entities(
hass, async_add_entities, coordinator, sensors, sensor_class
)
else:
async_restore_block_attribute_entities(
hass,
config_entry,
async_add_entities,
coordinator,
sensors,
sensor_class,
) |
Set up entities for block attributes. | def async_setup_block_attribute_entities(
hass: HomeAssistant,
async_add_entities: AddEntitiesCallback,
coordinator: ShellyBlockCoordinator,
sensors: Mapping[tuple[str, str], BlockEntityDescription],
sensor_class: Callable,
) -> None:
"""Set up entities for block attributes."""
entities = []
assert coordinator.device.blocks
for block in coordinator.device.blocks:
for sensor_id in block.sensor_ids:
description = sensors.get((cast(str, block.type), sensor_id))
if description is None:
continue
# Filter out non-existing sensors and sensors without a value
if getattr(block, sensor_id, None) is None:
continue
# Filter out and remove entities that, according to the settings,
# should not create an entity
if description.removal_condition and description.removal_condition(
coordinator.device.settings, block
):
domain = sensor_class.__module__.split(".")[-1]
unique_id = f"{coordinator.mac}-{block.description}-{sensor_id}"
async_remove_shelly_entity(hass, domain, unique_id)
else:
entities.append(
sensor_class(coordinator, block, sensor_id, description)
)
if not entities:
return
async_add_entities(entities) |
Restore block attribute entities. | def async_restore_block_attribute_entities(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
coordinator: ShellyBlockCoordinator,
sensors: Mapping[tuple[str, str], BlockEntityDescription],
sensor_class: Callable,
) -> None:
"""Restore block attributes entities."""
entities = []
ent_reg = er_async_get(hass)
entries = async_entries_for_config_entry(ent_reg, config_entry.entry_id)
domain = sensor_class.__module__.split(".")[-1]
for entry in entries:
if entry.domain != domain:
continue
attribute = entry.unique_id.split("-")[-1]
block_type = entry.unique_id.split("-")[-2].split("_")[0]
if description := sensors.get((block_type, attribute)):
entities.append(
sensor_class(coordinator, None, attribute, description, entry)
)
if not entities:
return
async_add_entities(entities) |
Set up entities for RPC sensors. | def async_setup_entry_rpc(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
sensors: Mapping[str, RpcEntityDescription],
sensor_class: Callable,
) -> None:
"""Set up entities for RPC sensors."""
coordinator = get_entry_data(hass)[config_entry.entry_id].rpc
assert coordinator
if coordinator.device.initialized:
async_setup_rpc_attribute_entities(
hass, config_entry, async_add_entities, sensors, sensor_class
)
else:
async_restore_rpc_attribute_entities(
hass, config_entry, async_add_entities, coordinator, sensors, sensor_class
) |
Set up entities for RPC attributes. | def async_setup_rpc_attribute_entities(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
sensors: Mapping[str, RpcEntityDescription],
sensor_class: Callable,
) -> None:
"""Set up entities for RPC attributes."""
coordinator = get_entry_data(hass)[config_entry.entry_id].rpc
assert coordinator
polling_coordinator = None
if not (sleep_period := config_entry.data[CONF_SLEEP_PERIOD]):
polling_coordinator = get_entry_data(hass)[config_entry.entry_id].rpc_poll
assert polling_coordinator
entities = []
for sensor_id in sensors:
description = sensors[sensor_id]
key_instances = get_rpc_key_instances(
coordinator.device.status, description.key
)
for key in key_instances:
# Filter non-existing sensors
if description.sub_key not in coordinator.device.status[
key
] and not description.supported(coordinator.device.status[key]):
continue
# Filter out and remove entities that, according to the settings/status,
# should not create an entity
if description.removal_condition and description.removal_condition(
coordinator.device.config, coordinator.device.status, key
):
domain = sensor_class.__module__.split(".")[-1]
unique_id = f"{coordinator.mac}-{key}-{sensor_id}"
async_remove_shelly_entity(hass, domain, unique_id)
elif description.use_polling_coordinator:
if not sleep_period:
entities.append(
sensor_class(polling_coordinator, key, sensor_id, description)
)
else:
entities.append(sensor_class(coordinator, key, sensor_id, description))
if not entities:
return
async_add_entities(entities) |
Restore RPC attribute entities. | def async_restore_rpc_attribute_entities(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
coordinator: ShellyRpcCoordinator,
sensors: Mapping[str, RpcEntityDescription],
sensor_class: Callable,
) -> None:
"""Restore block attributes entities."""
entities = []
ent_reg = er_async_get(hass)
entries = async_entries_for_config_entry(ent_reg, config_entry.entry_id)
domain = sensor_class.__module__.split(".")[-1]
for entry in entries:
if entry.domain != domain:
continue
key = entry.unique_id.split("-")[-2]
attribute = entry.unique_id.split("-")[-1]
if description := sensors.get(attribute):
entities.append(
sensor_class(coordinator, key, attribute, description, entry)
)
if not entities:
return
async_add_entities(entities) |
Set up entities for REST sensors. | def async_setup_entry_rest(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
sensors: Mapping[str, RestEntityDescription],
sensor_class: Callable,
) -> None:
"""Set up entities for REST sensors."""
coordinator = get_entry_data(hass)[config_entry.entry_id].rest
assert coordinator
async_add_entities(
sensor_class(coordinator, sensor_id, sensors[sensor_id])
for sensor_id in sensors
) |
Set up entities for block device. | def async_setup_block_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up entities for block device."""
coordinator = get_entry_data(hass)[config_entry.entry_id].block
assert coordinator
blocks = []
assert coordinator.device.blocks
for block in coordinator.device.blocks:
if block.type == "light":
blocks.append(block)
elif block.type == "relay" and block.channel is not None:
if not is_block_channel_type_light(
coordinator.device.settings, int(block.channel)
):
continue
blocks.append(block)
unique_id = f"{coordinator.mac}-{block.type}_{block.channel}"
async_remove_shelly_entity(hass, "switch", unique_id)
if not blocks:
return
async_add_entities(BlockShellyLight(coordinator, block) for block in blocks) |
Set up entities for RPC device. | def async_setup_rpc_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up entities for RPC device."""
coordinator = get_entry_data(hass)[config_entry.entry_id].rpc
assert coordinator
switch_key_ids = get_rpc_key_ids(coordinator.device.status, "switch")
switch_ids = []
for id_ in switch_key_ids:
if not is_rpc_channel_type_light(coordinator.device.config, id_):
continue
switch_ids.append(id_)
unique_id = f"{coordinator.mac}-switch:{id_}"
async_remove_shelly_entity(hass, "switch", unique_id)
if switch_ids:
async_add_entities(
RpcShellySwitchAsLight(coordinator, id_) for id_ in switch_ids
)
return
if light_key_ids := get_rpc_key_ids(coordinator.device.status, "light"):
# Light mode: remove RGB & RGBW entities, add light entities
async_remove_shelly_rpc_entities(
hass, LIGHT_DOMAIN, coordinator.mac, ["rgb:0", "rgbw:0"]
)
async_add_entities(RpcShellyLight(coordinator, id_) for id_ in light_key_ids)
return
light_keys = [f"light:{i}" for i in range(SHELLY_PLUS_RGBW_CHANNELS)]
if rgb_key_ids := get_rpc_key_ids(coordinator.device.status, "rgb"):
# RGB mode: remove light & RGBW entities, add RGB entity
async_remove_shelly_rpc_entities(
hass, LIGHT_DOMAIN, coordinator.mac, [*light_keys, "rgbw:0"]
)
async_add_entities(RpcShellyRgbLight(coordinator, id_) for id_ in rgb_key_ids)
return
if rgbw_key_ids := get_rpc_key_ids(coordinator.device.status, "rgbw"):
# RGBW mode: remove light & RGB entities, add RGBW entity
async_remove_shelly_rpc_entities(
hass, LIGHT_DOMAIN, coordinator.mac, [*light_keys, "rgb:0"]
)
async_add_entities(RpcShellyRgbwLight(coordinator, id_) for id_ in rgbw_key_ids) |
Describe logbook events. | def async_describe_events(
hass: HomeAssistant,
async_describe_event: Callable[[str, str, Callable[[Event], dict]], None],
) -> None:
"""Describe logbook events."""
@callback
def async_describe_shelly_click_event(event: Event) -> dict[str, str]:
"""Describe shelly.click logbook event (block device)."""
device_id = event.data[ATTR_DEVICE_ID]
click_type = event.data[ATTR_CLICK_TYPE]
channel = event.data[ATTR_CHANNEL]
input_name = f"{event.data[ATTR_DEVICE]} channel {channel}"
if click_type in RPC_INPUTS_EVENTS_TYPES:
rpc_coordinator = get_rpc_coordinator_by_device_id(hass, device_id)
if rpc_coordinator and rpc_coordinator.device.initialized:
key = f"input:{channel-1}"
input_name = get_rpc_entity_name(rpc_coordinator.device, key)
elif click_type in BLOCK_INPUTS_EVENTS_TYPES:
block_coordinator = get_block_coordinator_by_device_id(hass, device_id)
if block_coordinator and block_coordinator.device.initialized:
input_name = f"{block_coordinator.device.name} channel {channel}"
return {
LOGBOOK_ENTRY_NAME: "Shelly",
LOGBOOK_ENTRY_MESSAGE: (
f"'{click_type}' click event for {input_name} Input was fired"
),
}
async_describe_event(DOMAIN, EVENT_SHELLY_CLICK, async_describe_shelly_click_event) |
Set up entities for block device. | def async_setup_block_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up entities for block device."""
coordinator = get_entry_data(hass)[config_entry.entry_id].block
assert coordinator
# Add Shelly Gas Valve as a switch
if coordinator.model == MODEL_GAS:
async_setup_block_attribute_entities(
hass,
async_add_entities,
coordinator,
{("valve", "valve"): GAS_VALVE_SWITCH},
BlockValveSwitch,
)
return
# In roller mode the relay blocks exist but do not contain required info
if (
coordinator.model in [MODEL_2, MODEL_25]
and coordinator.device.settings["mode"] != "relay"
):
return
relay_blocks = []
assert coordinator.device.blocks
for block in coordinator.device.blocks:
if (
block.type != "relay"
or block.channel is not None
and is_block_channel_type_light(
coordinator.device.settings, int(block.channel)
)
):
continue
relay_blocks.append(block)
unique_id = f"{coordinator.mac}-{block.type}_{block.channel}"
async_remove_shelly_entity(hass, "light", unique_id)
if not relay_blocks:
return
async_add_entities(BlockRelaySwitch(coordinator, block) for block in relay_blocks) |
Set up entities for RPC device. | def async_setup_rpc_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up entities for RPC device."""
coordinator = get_entry_data(hass)[config_entry.entry_id].rpc
assert coordinator
switch_key_ids = get_rpc_key_ids(coordinator.device.status, "switch")
switch_ids = []
for id_ in switch_key_ids:
if is_rpc_channel_type_light(coordinator.device.config, id_):
continue
if coordinator.model == MODEL_WALL_DISPLAY:
# There are three configuration scenarios for WallDisplay:
# - relay mode (no thermostat)
# - thermostat mode using the internal relay as an actuator
# - thermostat mode using an external (from another device) relay as
# an actuator
if not is_rpc_thermostat_mode(id_, coordinator.device.status):
# The device is not in thermostat mode, we need to remove a climate
# entity
unique_id = f"{coordinator.mac}-thermostat:{id_}"
async_remove_shelly_entity(hass, "climate", unique_id)
elif is_rpc_thermostat_internal_actuator(coordinator.device.status):
# The internal relay is an actuator, skip this ID so as not to create
# a switch entity
continue
switch_ids.append(id_)
unique_id = f"{coordinator.mac}-switch:{id_}"
async_remove_shelly_entity(hass, "light", unique_id)
if not switch_ids:
return
async_add_entities(RpcRelaySwitch(coordinator, id_) for id_ in switch_ids) |
Remove a Shelly entity. | def async_remove_shelly_entity(
hass: HomeAssistant, domain: str, unique_id: str
) -> None:
"""Remove a Shelly entity."""
entity_reg = er_async_get(hass)
entity_id = entity_reg.async_get_entity_id(domain, DOMAIN, unique_id)
if entity_id:
LOGGER.debug("Removing entity: %s", entity_id)
entity_reg.async_remove(entity_id) |
Get number of channels for block type. | def get_number_of_channels(device: BlockDevice, block: Block) -> int:
"""Get number of channels for block type."""
channels = None
if block.type == "input":
# Shelly Dimmer/1L has two input channels and is missing "num_inputs"
if device.settings["device"]["type"] in [
MODEL_DIMMER,
MODEL_DIMMER_2,
MODEL_1L,
]:
channels = 2
else:
channels = device.shelly.get("num_inputs")
elif block.type == "emeter":
channels = device.shelly.get("num_emeters")
elif block.type in ["relay", "light"]:
channels = device.shelly.get("num_outputs")
elif block.type in ["roller", "device"]:
channels = 1
return channels or 1 |
Naming for block based switch and sensors. | def get_block_entity_name(
device: BlockDevice,
block: Block | None,
description: str | None = None,
) -> str:
"""Naming for block based switch and sensors."""
channel_name = get_block_channel_name(device, block)
if description:
return f"{channel_name} {description.lower()}"
return channel_name |
Get name based on device and channel name. | def get_block_channel_name(device: BlockDevice, block: Block | None) -> str:
"""Get name based on device and channel name."""
entity_name = device.name
if (
not block
or block.type == "device"
or get_number_of_channels(device, block) == 1
):
return entity_name
assert block.channel
channel_name: str | None = None
mode = cast(str, block.type) + "s"
if mode in device.settings:
channel_name = device.settings[mode][int(block.channel)].get("name")
if channel_name:
return channel_name
if device.settings["device"]["type"] == MODEL_EM3:
base = ord("A")
else:
base = ord("1")
return f"{entity_name} channel {chr(int(block.channel)+base)}" |
Return true if the block input button setting is set to a momentary type. | def is_block_momentary_input(
settings: dict[str, Any], block: Block, include_detached: bool = False
) -> bool:
"""Return true if block input button settings is set to a momentary type."""
momentary_types = ["momentary", "momentary_on_release"]
if include_detached:
momentary_types.append("detached")
# Shelly Button type is fixed to momentary and has no btn_type setting
if settings["device"]["type"] in SHBTN_MODELS:
return True
if settings.get("mode") == "roller":
button_type = settings["rollers"][0]["button_type"]
return button_type in momentary_types
button = settings.get("relays") or settings.get("lights") or settings.get("inputs")
if button is None:
return False
# Shelly 1L has two button settings in the first channel
if settings["device"]["type"] == MODEL_1L:
channel = int(block.channel or 0) + 1
button_type = button[0].get("btn" + str(channel) + "_type")
else:
# Some devices have only one channel in settings
channel = min(int(block.channel or 0), len(button) - 1)
button_type = button[channel].get("btn_type")
return button_type in momentary_types |
Return device uptime, tolerating up to 5 seconds of deviation. | def get_device_uptime(uptime: float, last_uptime: datetime | None) -> datetime:
"""Return device uptime, tolerating up to 5 seconds of deviation."""
delta_uptime = utcnow() - timedelta(seconds=uptime)
if (
not last_uptime
or abs((delta_uptime - last_uptime).total_seconds()) > UPTIME_DEVIATION
):
return delta_uptime
return last_uptime |
Return list of input triggers for block. | def get_block_input_triggers(
device: BlockDevice, block: Block
) -> list[tuple[str, str]]:
"""Return list of input triggers for block."""
if "inputEvent" not in block.sensor_ids or "inputEventCnt" not in block.sensor_ids:
return []
if not is_block_momentary_input(device.settings, block, True):
return []
if block.type == "device" or get_number_of_channels(device, block) == 1:
subtype = "button"
else:
assert block.channel
subtype = f"button{int(block.channel)+1}"
if device.settings["device"]["type"] in SHBTN_MODELS:
trigger_types = SHBTN_INPUTS_EVENTS_TYPES
elif device.settings["device"]["type"] == MODEL_I3:
trigger_types = SHIX3_1_INPUTS_EVENTS_TYPES
else:
trigger_types = BASIC_INPUTS_EVENTS_TYPES
return [(trigger_type, subtype) for trigger_type in trigger_types] |
Return list of input triggers for SHBTN models. | def get_shbtn_input_triggers() -> list[tuple[str, str]]:
"""Return list of input triggers for SHBTN models."""
return [(trigger_type, "button") for trigger_type in SHBTN_INPUTS_EVENTS_TYPES] |
Return the device sleep period in seconds or 0 for non-sleeping devices. | def get_block_device_sleep_period(settings: dict[str, Any]) -> int:
"""Return the device sleep period in seconds or 0 for non-sleeping devices."""
sleep_period = 0
if settings.get("sleep_mode", False):
sleep_period = settings["sleep_mode"]["period"]
if settings["sleep_mode"]["unit"] == "h":
sleep_period *= 60 # hours to minutes
return sleep_period * 60 |
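Worked example with a hand-written settings dict in the shape the helper expects (assumed values): a device sleeping for 12 hours reports 12 * 60 minutes * 60 = 43200 seconds, and a device without sleep_mode reports 0.
assert get_block_device_sleep_period({"sleep_mode": {"period": 12, "unit": "h"}}) == 43_200
assert get_block_device_sleep_period({}) == 0  # non-sleeping device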
Return the device wakeup period in seconds or 0 for non-sleeping devices. | def get_rpc_device_wakeup_period(status: dict[str, Any]) -> int:
"""Return the device wakeup period in seconds or 0 for non-sleeping devices."""
return cast(int, status["sys"].get("wakeup_period", 0)) |
Return true if device has authorization enabled. | def get_info_auth(info: dict[str, Any]) -> bool:
"""Return true if device has authorization enabled."""
return cast(bool, info.get("auth") or info.get("auth_en")) |
Return the device generation from shelly info. | def get_info_gen(info: dict[str, Any]) -> int:
"""Return the device generation from shelly info."""
return int(info.get(CONF_GEN, 1)) |
Return the device model name. | def get_model_name(info: dict[str, Any]) -> str:
"""Return the device model name."""
if get_info_gen(info) in RPC_GENERATIONS:
return cast(str, MODEL_NAMES.get(info["model"], info["model"]))
return cast(str, MODEL_NAMES.get(info["type"], info["type"])) |
Get name based on device and channel name. | def get_rpc_channel_name(device: RpcDevice, key: str) -> str:
"""Get name based on device and channel name."""
key = key.replace("emdata", "em")
key = key.replace("em1data", "em1")
device_name = device.name
entity_name: str | None = None
if key in device.config:
entity_name = device.config[key].get("name", device_name)
if entity_name is None:
if key.startswith(("input:", "light:", "switch:")):
return f"{device_name} {key.replace(':', '_')}"
if key.startswith("em1"):
return f"{device_name} EM{key.split(':')[-1]}"
return device_name
return entity_name |
Naming for RPC based switch and sensors. | def get_rpc_entity_name(
device: RpcDevice, key: str, description: str | None = None
) -> str:
"""Naming for RPC based switch and sensors."""
channel_name = get_rpc_channel_name(device, key)
if description:
return f"{channel_name} {description.lower()}"
return channel_name |
Return the device generation from config entry. | def get_device_entry_gen(entry: ConfigEntry) -> int:
"""Return the device generation from config entry."""
return entry.data.get(CONF_GEN, 1) |
Return list of key instances for RPC device from a dict. | def get_rpc_key_instances(keys_dict: dict[str, Any], key: str) -> list[str]:
"""Return list of key instances for RPC device from a dict."""
if key in keys_dict:
return [key]
if key == "switch" and "cover:0" in keys_dict:
key = "cover"
return [k for k in keys_dict if k.startswith(f"{key}:")] |
Return list of key ids for RPC device from a dict. | def get_rpc_key_ids(keys_dict: dict[str, Any], key: str) -> list[int]:
"""Return list of key ids for RPC device from a dict."""
return [int(k.split(":")[1]) for k in keys_dict if k.startswith(f"{key}:")] |
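A usage sketch for the two key helpers above with a hand-written status dict (hypothetical device keys); note that get_rpc_key_instances falls back from "switch" to "cover" when a cover component is present:
status = {"switch:0": {}, "switch:1": {}, "cover:0": {}, "sys": {}}
assert get_rpc_key_ids(status, "switch") == [0, 1]
assert get_rpc_key_ids(status, "input") == []
assert get_rpc_key_instances(status, "switch") == ["cover:0"]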
Return true if the RPC input button setting is set to a momentary type. | def is_rpc_momentary_input(
config: dict[str, Any], status: dict[str, Any], key: str
) -> bool:
"""Return true if rpc input button settings is set to a momentary type."""
return cast(bool, config[key]["type"] == "button") |
Return true if block channel appliance type is set to light. | def is_block_channel_type_light(settings: dict[str, Any], channel: int) -> bool:
"""Return true if block channel appliance type is set to light."""
app_type = settings["relays"][channel].get("appliance_type")
return app_type is not None and app_type.lower().startswith("light") |
Return true if the RPC channel consumption type is set to light. | def is_rpc_channel_type_light(config: dict[str, Any], channel: int) -> bool:
"""Return true if rpc channel consumption type is set to light."""
con_types = config["sys"].get("ui_data", {}).get("consumption_types")
if con_types is None or len(con_types) <= channel:
return False
return cast(str, con_types[channel]).lower().startswith("light") |
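A usage sketch with a minimal config dict containing only the keys this helper reads (assumed values):
config = {"sys": {"ui_data": {"consumption_types": ["light", "socket"]}}}
assert is_rpc_channel_type_light(config, 0) is True    # channel 0 is a light
assert is_rpc_channel_type_light(config, 1) is False   # channel 1 is a socket
assert is_rpc_channel_type_light({"sys": {}}, 0) is False  # no ui_data configured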
Return true if the thermostat uses an internal relay. | def is_rpc_thermostat_internal_actuator(status: dict[str, Any]) -> bool:
"""Return true if the thermostat uses an internal relay."""
return cast(bool, status["sys"].get("relay_in_thermostat", False)) |
Return list of input triggers for RPC device. | def get_rpc_input_triggers(device: RpcDevice) -> list[tuple[str, str]]:
"""Return list of input triggers for RPC device."""
triggers = []
key_ids = get_rpc_key_ids(device.config, "input")
for id_ in key_ids:
key = f"input:{id_}"
if not is_rpc_momentary_input(device.config, device.status, key):
continue
for trigger_type in RPC_INPUTS_EVENTS_TYPES:
subtype = f"button{id_+1}"
triggers.append((trigger_type, subtype))
return triggers |
Update the firmware version information in the device registry. | def update_device_fw_info(
hass: HomeAssistant, shellydevice: BlockDevice | RpcDevice, entry: ConfigEntry
) -> None:
"""Update the firmware version information in the device registry."""
assert entry.unique_id
dev_reg = dr_async_get(hass)
if device := dev_reg.async_get_device(
identifiers={(DOMAIN, entry.entry_id)},
connections={(CONNECTION_NETWORK_MAC, format_mac(entry.unique_id))},
):
if device.sw_version == shellydevice.firmware_version:
return
LOGGER.debug("Updating device registry info for %s", entry.title)
dev_reg.async_update_device(device.id, sw_version=shellydevice.firmware_version) |
Convert brightness level to percentage. | def brightness_to_percentage(brightness: int) -> int:
"""Convert brightness level to percentage."""
return int(100 * (brightness + 1) / 255) |
Convert percentage to brightness level. | def percentage_to_brightness(percentage: int) -> int:
"""Convert percentage to brightness level."""
return round(255 * percentage / 100) |
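Quick arithmetic check of the two conversions above; they are not exact inverses because one truncates and the other rounds:
assert brightness_to_percentage(255) == 100
assert brightness_to_percentage(101) == 40
assert percentage_to_brightness(100) == 255
assert percentage_to_brightness(40) == 102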
Convert a name to a mac address. | def mac_address_from_name(name: str) -> str | None:
"""Convert a name to a mac address."""
mac = name.partition(".")[0].partition("-")[-1]
return mac.upper() if len(mac) == 12 else None |
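A usage sketch with a hypothetical Shelly mDNS host name; anything that does not yield a 12-character suffix returns None:
assert mac_address_from_name("shellyplus1pm-a8032ab12345.local") == "A8032AB12345"
assert mac_address_from_name("not-a-shelly-host.local") is None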
Return release URL or None. | def get_release_url(gen: int, model: str, beta: bool) -> str | None:
"""Return release URL or None."""
if beta or model in DEVICES_WITHOUT_FIRMWARE_CHANGELOG:
return None
return GEN1_RELEASE_URL if gen in BLOCK_GENERATIONS else GEN2_RELEASE_URL |
Create a repair issue if the device runs an unsupported firmware. | def async_create_issue_unsupported_firmware(
hass: HomeAssistant, entry: ConfigEntry
) -> None:
"""Create a repair issue if the device runs an unsupported firmware."""
ir.async_create_issue(
hass,
DOMAIN,
FIRMWARE_UNSUPPORTED_ISSUE_ID.format(unique=entry.unique_id),
is_fixable=False,
is_persistent=False,
severity=ir.IssueSeverity.ERROR,
translation_key="unsupported_firmware",
translation_placeholders={
"device_name": entry.title,
"ip_address": entry.data["host"],
},
) |
Return true if all RPC WiFi stations are disabled. | def is_rpc_wifi_stations_disabled(
config: dict[str, Any], _status: dict[str, Any], key: str
) -> bool:
"""Return true if rpc all WiFi stations are disabled."""
if config[key]["sta"]["enable"] is True or config[key]["sta1"]["enable"] is True:
return False
return True |
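A usage sketch with a minimal config dict containing only the fields this check reads (assumed values):
config = {"wifi": {"sta": {"enable": False}, "sta1": {"enable": False}}}
assert is_rpc_wifi_stations_disabled(config, {}, "wifi") is True
config["wifi"]["sta"]["enable"] = True
assert is_rpc_wifi_stations_disabled(config, {}, "wifi") is False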
Get port from config entry data. | def get_http_port(data: MappingProxyType[str, Any]) -> int:
"""Get port from config entry data."""
return cast(int, data.get(CONF_PORT, DEFAULT_HTTP_PORT)) |
Remove RPC based Shelly entity. | def async_remove_shelly_rpc_entities(
hass: HomeAssistant, domain: str, mac: str, keys: list[str]
) -> None:
"""Remove RPC based Shelly entity."""
entity_reg = er_async_get(hass)
for key in keys:
if entity_id := entity_reg.async_get_entity_id(domain, DOMAIN, f"{mac}-{key}"):
LOGGER.debug("Removing entity: %s", entity_id)
entity_reg.async_remove(entity_id) |
Return True if 'thermostat:<IDent>' is present in the status. | def is_rpc_thermostat_mode(ident: int, status: dict[str, Any]) -> bool:
"""Return True if 'thermostat:<IDent>' is present in the status."""
return f"thermostat:{ident}" in status |
Set up valve for device. | def async_setup_block_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up valve for device."""
coordinator = get_entry_data(hass)[config_entry.entry_id].block
assert coordinator and coordinator.device.blocks
if coordinator.model == MODEL_GAS:
async_setup_block_attribute_entities(
hass,
async_add_entities,
coordinator,
{("valve", "valve"): GAS_VALVE},
BlockShellyValve,
) |
Set up the Shodan sensor. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Shodan sensor."""
api_key = config[CONF_API_KEY]
name = config[CONF_NAME]
query = config[CONF_QUERY]
data = ShodanData(shodan.Shodan(api_key), query)
try:
data.update()
except shodan.exception.APIError as error:
_LOGGER.warning("Unable to connect to Shodan.io: %s", error)
return
add_entities([ShodanSensor(data, name)], True) |
Handle getting shopping_list items. | def websocket_handle_items(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Handle getting shopping_list items."""
connection.send_message(
websocket_api.result_message(msg["id"], hass.data[DOMAIN].items)
) |
Handle reordering shopping_list items. | def websocket_handle_reorder(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Handle reordering shopping_list items."""
msg_id = msg.pop("id")
try:
hass.data[DOMAIN].async_reorder(msg.pop("item_ids"), connection.context(msg))
except NoMatchingShoppingListItem:
connection.send_error(
msg_id,
websocket_api.const.ERR_NOT_FOUND,
"One or more item id(s) not found.",
)
return
except vol.Invalid as err:
connection.send_error(msg_id, websocket_api.const.ERR_INVALID_FORMAT, f"{err}")
return
connection.send_result(msg_id) |
Generate binary sensors.
For each Account there is one connectivity sensor and one power sensor with zone == 0.
For each Zone in each Account there is one smoke and one moisture sensor. | def generate_binary_sensors(entry: ConfigEntry) -> Iterable[SIABinarySensor]:
"""Generate binary sensors.
For each Account there is one connectivity sensor and one power sensor with zone == 0.
For each Zone in each Account there is one smoke and one moisture sensor.
"""
for account_data in entry.data[CONF_ACCOUNTS]:
account = account_data[CONF_ACCOUNT]
zones = entry.options[CONF_ACCOUNTS][account][CONF_ZONES]
yield SIABinarySensorConnectivity(
entry, account, SIA_HUB_ZONE, ENTITY_DESCRIPTION_CONNECTIVITY
)
yield SIABinarySensor(entry, account, SIA_HUB_ZONE, ENTITY_DESCRIPTION_POWER)
for zone in range(1, zones + 1):
yield SIABinarySensor(entry, account, zone, ENTITY_DESCRIPTION_SMOKE)
yield SIABinarySensor(entry, account, zone, ENTITY_DESCRIPTION_MOISTURE) |
Validate the input by the user. | def validate_input(data: dict[str, Any]) -> dict[str, str] | None:
"""Validate the input by the user."""
try:
SIAAccount.validate_account(data[CONF_ACCOUNT], data.get(CONF_ENCRYPTION_KEY))
except InvalidKeyFormatError:
return {"base": "invalid_key_format"}
except InvalidKeyLengthError:
return {"base": "invalid_key_length"}
except InvalidAccountFormatError:
return {"base": "invalid_account_format"}
except InvalidAccountLengthError:
return {"base": "invalid_account_length"}
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception from SIAAccount")
return {"base": "unknown"}
if not 1 <= data[CONF_PING_INTERVAL] <= 1440:
return {"base": "invalid_ping"}
return validate_zones(data) |
Validate the zones field. | def validate_zones(data: dict[str, Any]) -> dict[str, str] | None:
"""Validate the zones field."""
if data[CONF_ZONES] == 0:
return {"base": "invalid_zones"}
return None |
Return the unique_id and name for an entity. | def get_unique_id_and_name(
entry_id: str,
port: int,
account: str,
zone: int,
entity_key: str,
) -> tuple[str, str]:
"""Return the unique_id and name for an entity."""
return (
(
f"{entry_id}_{account}_{zone}"
if entity_key == KEY_ALARM
else f"{entry_id}_{account}_{zone}_{entity_key}"
),
(
f"{port} - {account} - {entity_key}"
if zone == SIA_HUB_ZONE
else f"{port} - {account} - zone {zone} - {entity_key}"
),
) |
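Two illustrative calls against the helper above; the entry id, port and account are made up, and the expected strings assume SIA_HUB_ZONE == 0 and KEY_ALARM == "alarm" (both taken from the component's constants):
# Hub-zone alarm entity: no entity_key suffix in the unique_id, no zone in the name.
uid, name = get_unique_id_and_name("abc123", 7777, "AAA", SIA_HUB_ZONE, KEY_ALARM)
# uid == "abc123_AAA_0", name == "7777 - AAA - alarm"
# Zone-level sensor: the entity_key is appended and the zone shows up in the name.
uid, name = get_unique_id_and_name("abc123", 7777, "AAA", 2, "smoke")
# uid == "abc123_AAA_2_smoke", name == "7777 - AAA - zone 2 - smoke"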
Return the interval to the next unavailability check. | def get_unavailability_interval(ping: int) -> float:
"""Return the interval to the next unavailability check."""
return timedelta(minutes=ping, seconds=PING_INTERVAL_MARGIN).total_seconds() |
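A worked example of the interval maths above; the expression avoids hard-coding PING_INTERVAL_MARGIN, whose exact value is not assumed here:
# A 3 minute ping interval becomes 180 seconds plus the safety margin.
assert get_unavailability_interval(3) == 3 * 60 + PING_INTERVAL_MARGIN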
Create the attributes dict from a SIAEvent. | def get_attr_from_sia_event(event: SIAEvent) -> dict[str, Any]:
"""Create the attributes dict from a SIAEvent."""
timestamp = event.timestamp if event.timestamp else utcnow()
return {
ATTR_ZONE: event.ri,
ATTR_CODE: event.code,
ATTR_MESSAGE: event.message,
ATTR_ID: event.id,
ATTR_TIMESTAMP: timestamp.isoformat()
if isinstance(timestamp, datetime)
else timestamp,
} |
Create a dict from the SIA Event for the HA Event. | def get_event_data_from_sia_event(event: SIAEvent) -> dict[str, Any]:
"""Create a dict from the SIA Event for the HA Event."""
return {
"message_type": event.message_type.value
if isinstance(event.message_type, MessageTypes)
else event.message_type,
"receiver": event.receiver,
"line": event.line,
"account": event.account,
"sequence": event.sequence,
"content": event.content,
"ti": event.ti,
"id": event.id,
"ri": event.ri,
"code": event.code,
"message": event.message,
"x_data": event.x_data,
"timestamp": event.timestamp.isoformat()
if isinstance(event.timestamp, datetime)
else event.timestamp,
"event_qualifier": event.event_qualifier,
"event_type": event.event_type,
"partition": event.partition,
"extended_data": [
{
"identifier": xd.identifier,
"name": xd.name,
"description": xd.description,
"length": xd.length,
"characters": xd.characters,
"value": xd.value,
}
for xd in event.extended_data
]
if event.extended_data is not None
else None,
"sia_code": {
"code": event.sia_code.code,
"type": event.sia_code.type,
"description": event.sia_code.description,
"concerns": event.sia_code.concerns,
}
if event.sia_code is not None
else None,
} |
Set up the sigfox sensor. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the sigfox sensor."""
api_login = config[CONF_API_LOGIN]
api_password = config[CONF_API_PASSWORD]
name = config[CONF_NAME]
try:
sigfox = SigfoxAPI(api_login, api_password)
except ValueError:
return
auth = sigfox.auth
devices = sigfox.devices
add_entities((SigfoxDevice(device, auth, name) for device in devices), True) |
Take a seconds-since-epoch timestamp and return an ISO datetime string. | def epoch_to_datetime(epoch_time):
"""Take a seconds-since-epoch timestamp and return an ISO datetime string."""
return datetime.datetime.fromtimestamp(epoch_time).isoformat() |
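A quick illustration of the conversion above; fromtimestamp interprets the value as seconds in the local timezone, so the exact string depends on the host:
# 1_600_000_000 seconds after the Unix epoch.
print(epoch_to_datetime(1_600_000_000))
# e.g. "2020-09-13T12:26:40" on a UTC host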
Set up the platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the platform."""
# Validate credentials by processing image.
api_key = config[CONF_API_KEY]
account_type = config[CONF_ACCOUNT_TYPE]
api = hound.cloud(api_key, account_type)
try:
api.detect(b"Test")
except hound.SimplehoundException as exc:
_LOGGER.error("Sighthound error %s setup aborted", exc)
return
if save_file_folder := config.get(CONF_SAVE_FILE_FOLDER):
save_file_folder = Path(save_file_folder)
entities = []
for camera in config[CONF_SOURCE]:
sighthound = SighthoundEntity(
api,
camera[CONF_ENTITY_ID],
camera.get(CONF_NAME),
save_file_folder,
config[CONF_SAVE_TIMESTAMPTED_FILE],
)
entities.append(sighthound)
add_entities(entities) |
Get the SignalMessenger notification service. | def get_service(
hass: HomeAssistant,
config: ConfigType,
discovery_info: DiscoveryInfoType | None = None,
) -> SignalNotificationService:
"""Get the SignalMessenger notification service."""
sender_nr = config[CONF_SENDER_NR]
recp_nrs = config[CONF_RECP_NR]
signal_cli_rest_api_url = config[CONF_SIGNAL_CLI_REST_API]
signal_cli_rest_api = SignalCliRestApi(signal_cli_rest_api_url, sender_nr)
return SignalNotificationService(hass, recp_nrs, signal_cli_rest_api) |
Validate user input. | def validate_input(entry: dict[str, str]) -> dict[str, str] | None:
"""Validate user input."""
try:
if CONF_PASSWORD in entry:
send(
key=entry[CONF_DEVICE_KEY],
password=entry[CONF_PASSWORD],
salt=entry[CONF_SALT],
title="HA test",
message="Message delivered successfully",
)
else:
send(
key=entry[CONF_DEVICE_KEY],
title="HA test",
message="Message delivered successfully",
)
except UnknownError:
return {"base": "cannot_connect"}
return None |
Get a SimpliSafe OAuth code verifier and auth URL. | def async_get_simplisafe_oauth_values() -> SimpliSafeOAuthValues:
"""Get a SimpliSafe OAuth code verifier and auth URL."""
code_verifier = get_auth0_code_verifier()
code_challenge = get_auth0_code_challenge(code_verifier)
auth_url = get_auth_url(code_challenge)
return SimpliSafeOAuthValues(auth_url, code_verifier) |
Get the SimpliSafe system related to a service call (by device ID). | def _async_get_system_for_service_call(
hass: HomeAssistant, call: ServiceCall
) -> SystemType:
"""Get the SimpliSafe system related to a service call (by device ID)."""
device_id = call.data[ATTR_DEVICE_ID]
device_registry = dr.async_get(hass)
if (
alarm_control_panel_device_entry := device_registry.async_get(device_id)
) is None:
raise vol.Invalid("Invalid device ID specified")
assert alarm_control_panel_device_entry.via_device_id
if (
base_station_device_entry := device_registry.async_get(
alarm_control_panel_device_entry.via_device_id
)
) is None:
raise ValueError("No base station registered for alarm control panel")
[system_id_str] = [
identity[1]
for identity in base_station_device_entry.identifiers
if identity[0] == DOMAIN
]
system_id = int(system_id_str)
for entry_id in base_station_device_entry.config_entries:
if (simplisafe := hass.data[DOMAIN].get(entry_id)) is None:
continue
return cast(SystemType, simplisafe.systems[system_id])
raise ValueError(f"No system for device ID: {device_id}") |
Register a new bridge. | def _async_register_base_station(
hass: HomeAssistant, entry: ConfigEntry, system: SystemType
) -> None:
"""Register a new bridge."""
device_registry = dr.async_get(hass)
base_station = device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, str(system.system_id))},
manufacturer="SimpliSafe",
model=system.version,
name=system.address,
)
# Check for an old system ID format and remove it:
if old_base_station := device_registry.async_get_device(
identifiers={(DOMAIN, system.system_id)} # type: ignore[arg-type]
):
# Update the new base station with any properties the user might have configured
# on the old base station:
device_registry.async_update_device(
base_station.id,
area_id=old_base_station.area_id,
disabled_by=old_base_station.disabled_by,
name_by_user=old_base_station.name_by_user,
)
device_registry.async_remove_device(old_base_station.id) |