Find attributes with matching key from states.
def most_frequent_attribute(states: list[State], key: str) -> Any | None:
    """Find attributes with matching key from states."""
    if attrs := list(find_state_attributes(states, key)):
        return max(set(attrs), key=attrs.count)
    return None
Return True if all states are equal. Note: Returns True if no matching attribute is found.
def states_equal(states: list[State]) -> bool:
    """Return True if all states are equal.

    Note: Returns True if no matching attribute is found.
    """
    return _values_equal(find_state(states))
Return True if all values are equal. Note: Returns True if no matching attribute is found.
def _values_equal(values: Iterator[Any]) -> bool:
    """Return True if all values are equal.

    Note: Returns True if no matching attribute is found.
    """
    grp = groupby(values)
    return bool(next(grp, True) and not next(grp, False))
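A standalone sketch of the groupby idiom used above, independent of the Home Assistant helpers: groupby collapses consecutive equal values, so an all-equal iterable produces at most one group.

from itertools import groupby

def all_equal(values):
    """Return True if all values are equal (vacuously True for an empty iterable)."""
    grp = groupby(values)
    # The first next() consumes the single expected group; a second group means a mismatch.
    return bool(next(grp, True) and not next(grp, False))

assert all_equal([1, 1, 1]) is True
assert all_equal([1, 2, 1]) is False
assert all_equal([]) is True  # mirrors the "no matching attribute" note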
Find the first attribute matching key from states. If none are found, return default.
def reduce_attribute(
    states: list[State],
    key: str,
    default: Any | None = None,
    reduce: Callable[..., Any] = mean_int,
) -> Any:
    """Find the first attribute matching key from states.

    If none are found, return default.
    """
    attrs = list(find_state_attributes(states, key))

    if not attrs:
        return default

    if len(attrs) == 1:
        return attrs[0]

    return reduce(*attrs)
Preprocess alternative configuration formats.
def _conf_preprocess(value: Any) -> dict[str, Any]:
    """Preprocess alternative configuration formats."""
    if not isinstance(value, dict):
        return {CONF_ENTITIES: value}

    return value
Test if the group state is in its ON-state.
def is_on(hass: HomeAssistant, entity_id: str) -> bool:
    """Test if the group state is in its ON-state."""
    if REG_KEY not in hass.data:
        # Integration not setup yet, it cannot be on
        return False

    if (state := hass.states.get(entity_id)) is not None:
        registry: GroupIntegrationRegistry = hass.data[REG_KEY]
        return state.state in registry.on_off_mapping

    return False
Get all groups that contain this entity. Async friendly.
def groups_with_entity(hass: HomeAssistant, entity_id: str) -> list[str]:
    """Get all groups that contain this entity.

    Async friendly.
    """
    if DOMAIN not in hass.data:
        return []

    return [
        group.entity_id
        for group in hass.data[DOMAIN].entities
        if entity_id in group.tracking
    ]
Retrieve the device list for the selected plant.
def get_device_list(api, config):
    """Retrieve the device list for the selected plant."""
    plant_id = config[CONF_PLANT_ID]

    # Log in to api and fetch first plant if no plant id is defined.
    login_response = api.login(config[CONF_USERNAME], config[CONF_PASSWORD])
    if (
        not login_response["success"]
        and login_response["msg"] == LOGIN_INVALID_AUTH_CODE
    ):
        _LOGGER.error("Username, Password or URL may be incorrect!")
        return
    user_id = login_response["user"]["id"]
    if plant_id == DEFAULT_PLANT_ID:
        plant_info = api.plant_list(user_id)
        plant_id = plant_info["data"][0]["plantId"]

    # Get a list of devices for specified plant to add sensors for.
    devices = api.device_list(plant_id)
    return [devices, plant_id]
Set up the Gstreamer platform.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Gstreamer platform."""
    name = config.get(CONF_NAME)
    pipeline = config.get(CONF_PIPELINE)
    player = GstreamerPlayer(pipeline)

    def _shutdown(call):
        """Quit the player on shutdown."""
        player.quit()

    hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)
    add_entities([GstreamerDevice(player, name)])
Get the next departure for the given schedule.
def get_next_departure(
    schedule: Any,
    start_station_id: Any,
    end_station_id: Any,
    offset: cv.time_period,
    include_tomorrow: bool = False,
) -> dict:
    """Get the next departure for the given schedule."""
    now = dt_util.now().replace(tzinfo=None) + offset
    now_date = now.strftime(dt_util.DATE_STR_FORMAT)
    yesterday = now - datetime.timedelta(days=1)
    yesterday_date = yesterday.strftime(dt_util.DATE_STR_FORMAT)
    tomorrow = now + datetime.timedelta(days=1)
    tomorrow_date = tomorrow.strftime(dt_util.DATE_STR_FORMAT)

    # Fetch all departures for yesterday, today and optionally tomorrow,
    # up to an overkill maximum in case of a departure every minute for those
    # days.
    limit = 24 * 60 * 60 * 2
    tomorrow_select = tomorrow_where = tomorrow_order = ""
    if include_tomorrow:
        limit = int(limit / 2 * 3)
        tomorrow_name = tomorrow.strftime("%A").lower()
        tomorrow_select = f"calendar.{tomorrow_name} AS tomorrow,"
        tomorrow_where = f"OR calendar.{tomorrow_name} = 1"
        tomorrow_order = f"calendar.{tomorrow_name} DESC,"

    sql_query = f"""
        SELECT trip.trip_id, trip.route_id,
               time(origin_stop_time.arrival_time) AS origin_arrival_time,
               time(origin_stop_time.departure_time) AS origin_depart_time,
               date(origin_stop_time.departure_time) AS origin_depart_date,
               origin_stop_time.drop_off_type AS origin_drop_off_type,
               origin_stop_time.pickup_type AS origin_pickup_type,
               origin_stop_time.shape_dist_traveled AS origin_dist_traveled,
               origin_stop_time.stop_headsign AS origin_stop_headsign,
               origin_stop_time.stop_sequence AS origin_stop_sequence,
               origin_stop_time.timepoint AS origin_stop_timepoint,
               time(destination_stop_time.arrival_time) AS dest_arrival_time,
               time(destination_stop_time.departure_time) AS dest_depart_time,
               destination_stop_time.drop_off_type AS dest_drop_off_type,
               destination_stop_time.pickup_type AS dest_pickup_type,
               destination_stop_time.shape_dist_traveled AS dest_dist_traveled,
               destination_stop_time.stop_headsign AS dest_stop_headsign,
               destination_stop_time.stop_sequence AS dest_stop_sequence,
               destination_stop_time.timepoint AS dest_stop_timepoint,
               calendar.{yesterday.strftime("%A").lower()} AS yesterday,
               calendar.{now.strftime("%A").lower()} AS today,
               {tomorrow_select}
               calendar.start_date AS start_date,
               calendar.end_date AS end_date
        FROM trips trip
        INNER JOIN calendar calendar
                   ON trip.service_id = calendar.service_id
        INNER JOIN stop_times origin_stop_time
                   ON trip.trip_id = origin_stop_time.trip_id
        INNER JOIN stops start_station
                   ON origin_stop_time.stop_id = start_station.stop_id
        INNER JOIN stop_times destination_stop_time
                   ON trip.trip_id = destination_stop_time.trip_id
        INNER JOIN stops end_station
                   ON destination_stop_time.stop_id = end_station.stop_id
        WHERE (calendar.{yesterday.strftime("%A").lower()} = 1
               OR calendar.{now.strftime("%A").lower()} = 1
               {tomorrow_where}
               )
        AND start_station.stop_id = :origin_station_id
        AND end_station.stop_id = :end_station_id
        AND origin_stop_sequence < dest_stop_sequence
        AND calendar.start_date <= :today
        AND calendar.end_date >= :today
        ORDER BY calendar.{yesterday.strftime("%A").lower()} DESC,
                 calendar.{now.strftime("%A").lower()} DESC,
                 {tomorrow_order}
                 origin_stop_time.departure_time
        LIMIT :limit
        """  # noqa: S608
    result = schedule.engine.connect().execute(
        text(sql_query),
        {
            "origin_station_id": start_station_id,
            "end_station_id": end_station_id,
            "today": now_date,
            "limit": limit,
        },
    )

    # Create lookup timetable for today and possibly tomorrow, taking into
    # account any departures from yesterday scheduled after midnight,
    # as long as all departures are within the calendar date range.
    timetable = {}
    yesterday_start = today_start = tomorrow_start = None
    yesterday_last = today_last = ""
    for row_cursor in result:
        row = row_cursor._asdict()
        if row["yesterday"] == 1 and yesterday_date >= row["start_date"]:
            extras = {"day": "yesterday", "first": None, "last": False}
            if yesterday_start is None:
                yesterday_start = row["origin_depart_date"]
            if yesterday_start != row["origin_depart_date"]:
                idx = f"{now_date} {row['origin_depart_time']}"
                timetable[idx] = {**row, **extras}
                yesterday_last = idx
        if row["today"] == 1:
            extras = {"day": "today", "first": False, "last": False}
            if today_start is None:
                today_start = row["origin_depart_date"]
                extras["first"] = True
            if today_start == row["origin_depart_date"]:
                idx_prefix = now_date
            else:
                idx_prefix = tomorrow_date
            idx = f"{idx_prefix} {row['origin_depart_time']}"
            timetable[idx] = {**row, **extras}
            today_last = idx
        if (
            "tomorrow" in row
            and row["tomorrow"] == 1
            and tomorrow_date <= row["end_date"]
        ):
            extras = {"day": "tomorrow", "first": False, "last": None}
            if tomorrow_start is None:
                tomorrow_start = row["origin_depart_date"]
                extras["first"] = True
            if tomorrow_start == row["origin_depart_date"]:
                idx = f"{tomorrow_date} {row['origin_depart_time']}"
                timetable[idx] = {**row, **extras}

    # Flag last departures.
    for idx in filter(None, [yesterday_last, today_last]):
        timetable[idx]["last"] = True

    _LOGGER.debug("Timetable: %s", sorted(timetable.keys()))

    item = {}
    for key in sorted(timetable.keys()):
        if dt_util.parse_datetime(key) > now:
            item = timetable[key]
            _LOGGER.debug(
                "Departure found for station %s @ %s -> %s", start_station_id, key, item
            )
            break

    if item == {}:
        return {}

    # Format arrival and departure dates and times, accounting for the
    # possibility of times crossing over midnight.
    origin_arrival = now
    if item["origin_arrival_time"] > item["origin_depart_time"]:
        origin_arrival -= datetime.timedelta(days=1)
    origin_arrival_time = (
        f"{origin_arrival.strftime(dt_util.DATE_STR_FORMAT)} "
        f"{item['origin_arrival_time']}"
    )

    origin_depart_time = f"{now_date} {item['origin_depart_time']}"

    dest_arrival = now
    if item["dest_arrival_time"] < item["origin_depart_time"]:
        dest_arrival += datetime.timedelta(days=1)
    dest_arrival_time = (
        f"{dest_arrival.strftime(dt_util.DATE_STR_FORMAT)} {item['dest_arrival_time']}"
    )

    dest_depart = dest_arrival
    if item["dest_depart_time"] < item["dest_arrival_time"]:
        dest_depart += datetime.timedelta(days=1)
    dest_depart_time = (
        f"{dest_depart.strftime(dt_util.DATE_STR_FORMAT)} {item['dest_depart_time']}"
    )

    depart_time = dt_util.parse_datetime(origin_depart_time)
    arrival_time = dt_util.parse_datetime(dest_arrival_time)

    origin_stop_time = {
        "Arrival Time": origin_arrival_time,
        "Departure Time": origin_depart_time,
        "Drop Off Type": item["origin_drop_off_type"],
        "Pickup Type": item["origin_pickup_type"],
        "Shape Dist Traveled": item["origin_dist_traveled"],
        "Headsign": item["origin_stop_headsign"],
        "Sequence": item["origin_stop_sequence"],
        "Timepoint": item["origin_stop_timepoint"],
    }

    destination_stop_time = {
        "Arrival Time": dest_arrival_time,
        "Departure Time": dest_depart_time,
        "Drop Off Type": item["dest_drop_off_type"],
        "Pickup Type": item["dest_pickup_type"],
        "Shape Dist Traveled": item["dest_dist_traveled"],
        "Headsign": item["dest_stop_headsign"],
        "Sequence": item["dest_stop_sequence"],
        "Timepoint": item["dest_stop_timepoint"],
    }

    return {
        "trip_id": item["trip_id"],
        "route_id": item["route_id"],
        "day": item["day"],
        "first": item["first"],
        "last": item["last"],
        "departure_time": depart_time,
        "arrival_time": arrival_time,
        "origin_stop_time": origin_stop_time,
        "destination_stop_time": destination_stop_time,
    }
Set up the GTFS sensor.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the GTFS sensor."""
    gtfs_dir = hass.config.path(DEFAULT_PATH)
    data = config[CONF_DATA]
    origin = config.get(CONF_ORIGIN)
    destination = config.get(CONF_DESTINATION)
    name = config.get(CONF_NAME)
    offset: datetime.timedelta = config[CONF_OFFSET]
    include_tomorrow = config[CONF_TOMORROW]

    os.makedirs(gtfs_dir, exist_ok=True)

    if not os.path.exists(os.path.join(gtfs_dir, data)):
        _LOGGER.error("The given GTFS data file/folder was not found")
        return

    (gtfs_root, _) = os.path.splitext(data)

    sqlite_file = f"{gtfs_root}.sqlite?check_same_thread=False"
    joined_path = os.path.join(gtfs_dir, sqlite_file)
    gtfs = pygtfs.Schedule(joined_path)

    if not gtfs.feeds:
        pygtfs.append_feed(gtfs, os.path.join(gtfs_dir, data))

    add_entities(
        [GTFSDepartureSensor(gtfs, name, origin, destination, offset, include_tomorrow)]
    )
Get the device's 4-digit PIN from its zeroconf-discovered hostname.
def async_get_pin_from_discovery_hostname(hostname: str) -> str:
    """Get the device's 4-digit PIN from its zeroconf-discovered hostname."""
    return hostname.split(".")[0].split("-")[1]
Get the device's 4-digit PIN from its UID.
def async_get_pin_from_uid(uid: str) -> str:
    """Get the device's 4-digit PIN from its UID."""
    return uid[-4:]
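For illustration only, both helpers reduce to simple string slicing; the hostname and UID below are hypothetical and only assume a "<model>-<pin>.local." style hostname, not values taken from the source.

hostname = "GVC1-3456.local."
uid = "ABCDEF123456"

assert hostname.split(".")[0].split("-")[1] == "3456"  # async_get_pin_from_discovery_hostname
assert uid[-4:] == "3456"  # async_get_pin_from_uid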
Return True if the valve is open.
def is_open(data: dict[str, Any]) -> bool:
    """Return True if the valve is open."""
    return data["state"] in (
        GuardianValveState.FINISH_OPENING,
        GuardianValveState.OPEN,
        GuardianValveState.OPENING,
        GuardianValveState.START_OPENING,
    )
Remove old entities and create a repairs issue with info on their replacement.
def async_finish_entity_domain_replacements(
    hass: HomeAssistant,
    entry: ConfigEntry,
    entity_replacement_strategies: Iterable[EntityDomainReplacementStrategy],
) -> None:
    """Remove old entities and create a repairs issue with info on their replacement."""
    ent_reg = er.async_get(hass)
    for strategy in entity_replacement_strategies:
        try:
            [registry_entry] = [
                registry_entry
                for registry_entry in er.async_entries_for_config_entry(
                    ent_reg, entry.entry_id
                )
                if registry_entry.domain == strategy.old_domain
                and registry_entry.unique_id == strategy.old_unique_id
            ]
        except ValueError:
            continue

        old_entity_id = registry_entry.entity_id
        LOGGER.info('Removing old entity: "%s"', old_entity_id)
        ent_reg.async_remove(old_entity_id)
Decorate to handle exceptions from the Guardian API.
def convert_exceptions_to_homeassistant_error(
    func: Callable[Concatenate[_GuardianEntityT, _P], Coroutine[Any, Any, Any]],
) -> Callable[Concatenate[_GuardianEntityT, _P], Coroutine[Any, Any, None]]:
    """Decorate to handle exceptions from the Guardian API."""

    @wraps(func)
    async def wrapper(
        entity: _GuardianEntityT, *args: _P.args, **kwargs: _P.kwargs
    ) -> None:
        """Wrap the provided function."""
        try:
            await func(entity, *args, **kwargs)
        except GuardianError as err:
            raise HomeAssistantError(
                f"Error while calling {func.__name__}: {err}"
            ) from err

    return wrapper
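A minimal, self-contained sketch of the same pattern, using stand-in exception classes and a hypothetical coroutine rather than the real Guardian and Home Assistant ones:

import asyncio
from functools import wraps

class GuardianError(Exception): ...
class HomeAssistantError(Exception): ...

def convert_exceptions(func):
    """Re-raise GuardianError from an async function as HomeAssistantError."""
    @wraps(func)
    async def wrapper(*args, **kwargs):
        try:
            await func(*args, **kwargs)
        except GuardianError as err:
            raise HomeAssistantError(f"Error while calling {func.__name__}: {err}") from err
    return wrapper

@convert_exceptions
async def open_valve():
    raise GuardianError("device unreachable")

try:
    asyncio.run(open_valve())
except HomeAssistantError as err:
    print(err)  # Error while calling open_valve: device unreachable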
Return if the valve is closing.
def is_closing(data: dict[str, Any]) -> bool:
    """Return if the valve is closing."""
    return data["state"] in (
        GuardianValveState.CLOSING,
        GuardianValveState.FINISH_CLOSING,
        GuardianValveState.START_CLOSING,
    )
Return if the valve is opening.
def is_opening(data: dict[str, Any]) -> bool:
    """Return if the valve is opening."""
    return data["state"] in (
        GuardianValveState.OPENING,
        GuardianValveState.FINISH_OPENING,
        GuardianValveState.START_OPENING,
    )
Get the entry ID related to a service call (by device ID).
def async_get_entry_id_for_service_call(hass: HomeAssistant, call: ServiceCall) -> str:
    """Get the entry ID related to a service call (by device ID)."""
    device_id = call.data[CONF_DEVICE_ID]
    device_registry = dr.async_get(hass)

    if (device_entry := device_registry.async_get(device_id)) is None:
        raise ValueError(f"Invalid Guardian device ID: {device_id}")

    for entry_id in device_entry.config_entries:
        if (entry := hass.config_entries.async_get_entry(entry_id)) is None:
            continue
        if entry.domain == DOMAIN:
            return entry_id

    raise ValueError(f"No config entry for device ID: {device_id}")
Validate that all API users are unique.
def has_all_unique_users(value):
    """Validate that all API users are unique."""
    api_users = [user[CONF_API_USER] for user in value]
    has_unique_values(api_users)
    return value
Validate that all users' names are unique and set if any is set.
def has_all_unique_users_names(value):
    """Validate that all users' names are unique and set if any is set."""
    names = [user.get(CONF_NAME) for user in value]
    if None in names and any(name is not None for name in names):
        raise vol.Invalid("user names of all users must be set if any is set")
    if not all(name is None for name in names):
        has_unique_values(names)
    return value
Return board info.
def async_info(hass: HomeAssistant) -> list[HardwareInfo]:
    """Return board info."""
    if (os_info := get_os_info(hass)) is None:
        raise HomeAssistantError
    board: str | None
    if (board := os_info.get("board")) is None:
        raise HomeAssistantError
    if not board.startswith("odroid"):
        raise HomeAssistantError

    config_entries = [
        entry.entry_id for entry in hass.config_entries.async_entries(DOMAIN)
    ]

    return [
        HardwareInfo(
            board=BoardInfo(
                hassio_board_id=board,
                manufacturer=DOMAIN,
                model=board,
                revision=None,
            ),
            config_entries=config_entries,
            dongle=None,
            name=BOARD_NAMES.get(board, f"Unknown hardkernel Odroid model '{board}'"),
            url=None,
        )
    ]
Register a hardware platform.
def _register_hardware_platform(
    hass: HomeAssistant, integration_domain: str, platform: HardwareProtocol
) -> None:
    """Register a hardware platform."""
    if integration_domain == DOMAIN:
        return
    if not hasattr(platform, "async_info"):
        raise HomeAssistantError(f"Invalid hardware platform {platform}")
    hass.data[DOMAIN]["hardware_platform"][integration_domain] = platform
Subscribe to system status updates.
def ws_subscribe_system_status(
    hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
    """Subscribe to system status updates."""
    system_status: SystemStatus = hass.data[DOMAIN]["system_status"]

    @callback
    def async_update_status(now: datetime) -> None:
        # Although cpu_percent and virtual_memory access files in the /proc vfs, those
        # accesses do not block and we don't need to wrap the calls in an executor.
        # https://elixir.bootlin.com/linux/v5.19.4/source/fs/proc/stat.c
        # https://elixir.bootlin.com/linux/v5.19.4/source/fs/proc/meminfo.c#L32
        cpu_percentage = round(
            system_status.ha_psutil.psutil.cpu_percent(interval=None)
        )
        virtual_memory = system_status.ha_psutil.psutil.virtual_memory()
        json_msg = {
            "cpu_percent": cpu_percentage,
            "memory_used_percent": virtual_memory.percent,
            "memory_used_mb": round(
                (virtual_memory.total - virtual_memory.available) / 1024**2, 1
            ),
            "memory_free_mb": round(virtual_memory.available / 1024**2, 1),
            "timestamp": dt_util.utcnow().isoformat(),
        }
        for connection, msg_id in system_status.subscribers:
            connection.send_message(websocket_api.event_message(msg_id, json_msg))

    if not system_status.subscribers:
        system_status.remove_periodic_timer = async_track_time_interval(
            hass, async_update_status, timedelta(seconds=5)
        )

    system_status.subscribers.add((connection, msg["id"]))

    @callback
    def cancel_subscription() -> None:
        system_status.subscribers.remove((connection, msg["id"]))
        if not system_status.subscribers and system_status.remove_periodic_timer:
            system_status.remove_periodic_timer()
            system_status.remove_periodic_timer = None

    connection.subscriptions[msg["id"]] = cancel_subscription

    connection.send_message(websocket_api.result_message(msg["id"]))
Set up the AVR platform.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discover_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the AVR platform."""
    name = config[CONF_NAME]
    host = config[CONF_HOST]
    port = config[CONF_PORT]

    avr = hkavr.HkAVR(host, port, name)
    avr_device = HkAvrDevice(avr)

    add_entities([avr_device], True)
Find the unique id for both websocket and xmpp clients.
def find_unique_id_for_remote(harmony: HarmonyAPI):
    """Find the unique id for both websocket and xmpp clients."""
    if harmony.hub_id is not None:
        return str(harmony.hub_id)

    # fallback timeStampHash if Hub ID is not available
    return harmony.config["global"]["timeStampHash"].split(";")[-1]
Find the best name from config or fallback to the remote.
def find_best_name_for_remote(data: dict, harmony: HarmonyAPI):
    """Find the best name from config or fallback to the remote."""
    # As a last resort we get the name from the harmony client
    # in the event a name was not provided. harmony.name is
    # usually the ip address but it can be an empty string.
    if CONF_NAME not in data or data[CONF_NAME] is None or data[CONF_NAME] == "":
        return harmony.name
    return data[CONF_NAME]
Handle HassioAPIError and raise a specific AddonError.
def api_error(
    error_message: str,
) -> Callable[
    [_FuncType[_AddonManagerT, _P, _R]], _ReturnFuncType[_AddonManagerT, _P, _R]
]:
    """Handle HassioAPIError and raise a specific AddonError."""

    def handle_hassio_api_error(
        func: _FuncType[_AddonManagerT, _P, _R],
    ) -> _ReturnFuncType[_AddonManagerT, _P, _R]:
        """Handle a HassioAPIError."""

        @wraps(func)
        async def wrapper(
            self: _AddonManagerT, *args: _P.args, **kwargs: _P.kwargs
        ) -> _R:
            """Wrap an add-on manager method."""
            try:
                return_value = await func(self, *args, **kwargs)
            except HassioAPIError as err:
                raise AddonError(
                    f"{error_message.format(addon_name=self.addon_name)}: {err}"
                ) from err

            return return_value

        return wrapper

    return handle_hassio_api_error
Auth setup.
def async_setup_auth_view(hass: HomeAssistant, user: User) -> None:
    """Auth setup."""
    hassio_auth = HassIOAuth(hass, user)
    hassio_password_reset = HassIOPasswordReset(hass, user)
    hass.http.register_view(hassio_auth)
    hass.http.register_view(hassio_password_reset)
Return generic information from Supervisor. Async friendly.
def get_info(hass: HomeAssistant) -> dict[str, Any] | None:
    """Return generic information from Supervisor.

    Async friendly.
    """
    return hass.data.get(DATA_INFO)
Return generic host information. Async friendly.
def get_host_info(hass: HomeAssistant) -> dict[str, Any] | None:
    """Return generic host information.

    Async friendly.
    """
    return hass.data.get(DATA_HOST_INFO)
Return store information. Async friendly.
def get_store(hass: HomeAssistant) -> dict[str, Any] | None:
    """Return store information.

    Async friendly.
    """
    return hass.data.get(DATA_STORE)
Return Supervisor information. Async friendly.
def get_supervisor_info(hass: HomeAssistant) -> dict[str, Any] | None:
    """Return Supervisor information.

    Async friendly.
    """
    return hass.data.get(DATA_SUPERVISOR_INFO)
Return Addons info. Async friendly.
def get_addons_info(hass: HomeAssistant) -> dict[str, dict[str, Any]] | None:
    """Return Addons info.

    Async friendly.
    """
    return hass.data.get(DATA_ADDONS_INFO)
Return Addons stats. Async friendly.
def get_addons_stats(hass: HomeAssistant) -> dict[str, Any]:
    """Return Addons stats.

    Async friendly.
    """
    return hass.data.get(DATA_ADDONS_STATS) or {}
Return core stats. Async friendly.
def get_core_stats(hass: HomeAssistant) -> dict[str, Any]:
    """Return core stats.

    Async friendly.
    """
    return hass.data.get(DATA_CORE_STATS) or {}
Return supervisor stats. Async friendly.
def get_supervisor_stats(hass: HomeAssistant) -> dict[str, Any]:
    """Return supervisor stats.

    Async friendly.
    """
    return hass.data.get(DATA_SUPERVISOR_STATS) or {}
Return Addons changelogs. Async friendly.
def get_addons_changelogs(hass: HomeAssistant):
    """Return Addons changelogs.

    Async friendly.
    """
    return hass.data.get(DATA_ADDONS_CHANGELOGS)
Return OS information. Async friendly.
def get_os_info(hass: HomeAssistant) -> dict[str, Any] | None:
    """Return OS information.

    Async friendly.
    """
    return hass.data.get(DATA_OS_INFO)
Return Home Assistant Core information from Supervisor. Async friendly.
def get_core_info(hass: HomeAssistant) -> dict[str, Any] | None:
    """Return Home Assistant Core information from Supervisor.

    Async friendly.
    """
    return hass.data.get(DATA_CORE_INFO)
Return Supervisor issues info. Async friendly.
def get_issues_info(hass: HomeAssistant) -> SupervisorIssues | None:
    """Return Supervisor issues info.

    Async friendly.
    """
    return hass.data.get(DATA_KEY_SUPERVISOR_ISSUES)
Register addons in the device registry.
def async_register_addons_in_dev_reg(
    entry_id: str, dev_reg: dr.DeviceRegistry, addons: list[dict[str, Any]]
) -> None:
    """Register addons in the device registry."""
    for addon in addons:
        params = DeviceInfo(
            identifiers={(DOMAIN, addon[ATTR_SLUG])},
            model=SupervisorEntityModel.ADDON,
            sw_version=addon[ATTR_VERSION],
            name=addon[ATTR_NAME],
            entry_type=dr.DeviceEntryType.SERVICE,
            configuration_url=f"homeassistant://hassio/addon/{addon[ATTR_SLUG]}",
        )
        if manufacturer := addon.get(ATTR_REPOSITORY) or addon.get(ATTR_URL):
            params[ATTR_MANUFACTURER] = manufacturer
        dev_reg.async_get_or_create(config_entry_id=entry_id, **params)
Register OS in the device registry.
def async_register_os_in_dev_reg(
    entry_id: str, dev_reg: dr.DeviceRegistry, os_dict: dict[str, Any]
) -> None:
    """Register OS in the device registry."""
    params = DeviceInfo(
        identifiers={(DOMAIN, "OS")},
        manufacturer="Home Assistant",
        model=SupervisorEntityModel.OS,
        sw_version=os_dict[ATTR_VERSION],
        name="Home Assistant Operating System",
        entry_type=dr.DeviceEntryType.SERVICE,
    )
    dev_reg.async_get_or_create(config_entry_id=entry_id, **params)
Register host in the device registry.
def async_register_host_in_dev_reg(
    entry_id: str,
    dev_reg: dr.DeviceRegistry,
) -> None:
    """Register host in the device registry."""
    params = DeviceInfo(
        identifiers={(DOMAIN, "host")},
        manufacturer="Home Assistant",
        model=SupervisorEntityModel.HOST,
        name="Home Assistant Host",
        entry_type=dr.DeviceEntryType.SERVICE,
    )
    dev_reg.async_get_or_create(config_entry_id=entry_id, **params)
Register Home Assistant Core in the device registry.
def async_register_core_in_dev_reg(
    entry_id: str,
    dev_reg: dr.DeviceRegistry,
    core_dict: dict[str, Any],
) -> None:
    """Register Home Assistant Core in the device registry."""
    params = DeviceInfo(
        identifiers={(DOMAIN, "core")},
        manufacturer="Home Assistant",
        model=SupervisorEntityModel.CORE,
        sw_version=core_dict[ATTR_VERSION],
        name="Home Assistant Core",
        entry_type=dr.DeviceEntryType.SERVICE,
    )
    dev_reg.async_get_or_create(config_entry_id=entry_id, **params)
Register the Supervisor in the device registry.
def async_register_supervisor_in_dev_reg(
    entry_id: str,
    dev_reg: dr.DeviceRegistry,
    supervisor_dict: dict[str, Any],
) -> None:
    """Register the Supervisor in the device registry."""
    params = DeviceInfo(
        identifiers={(DOMAIN, "supervisor")},
        manufacturer="Home Assistant",
        model=SupervisorEntityModel.SUPERVIOSR,
        sw_version=supervisor_dict[ATTR_VERSION],
        name="Home Assistant Supervisor",
        entry_type=dr.DeviceEntryType.SERVICE,
    )
    dev_reg.async_get_or_create(config_entry_id=entry_id, **params)
Remove addons from the device registry.
def async_remove_addons_from_dev_reg(
    dev_reg: dr.DeviceRegistry, addons: set[str]
) -> None:
    """Remove addons from the device registry."""
    for addon_slug in addons:
        if dev := dev_reg.async_get_device(identifiers={(DOMAIN, addon_slug)}):
            dev_reg.async_remove_device(dev.id)
Discovery setup.
def async_setup_discovery_view(hass: HomeAssistant, hassio: HassIO) -> None:
    """Discovery setup."""
    hassio_discovery = HassIODiscovery(hass, hassio)
    hass.http.register_view(hassio_discovery)

    # Handle existing discovery messages
    async def _async_discovery_start_handler(event: Event) -> None:
        """Process all existing discovery messages on startup."""
        try:
            data = await hassio.retrieve_discovery_messages()
        except HassioAPIError as err:
            _LOGGER.error("Can't read discover info: %s", err)
            return

        jobs = [
            asyncio.create_task(hassio_discovery.async_process_new(discovery))
            for discovery in data[ATTR_DISCOVERY]
        ]
        if jobs:
            await asyncio.wait(jobs)

    hass.bus.async_listen_once(
        EVENT_HOMEASSISTANT_START, _async_discovery_start_handler
    )
Return a boolean.
def _api_bool(
    funct: Callable[_P, Coroutine[Any, Any, dict[str, Any]]],
) -> Callable[_P, Coroutine[Any, Any, bool]]:
    """Return a boolean."""

    async def _wrapper(*argv: _P.args, **kwargs: _P.kwargs) -> bool:
        """Wrap function."""
        try:
            data = await funct(*argv, **kwargs)
            return data["result"] == "ok"
        except HassioAPIError:
            return False

    return _wrapper
Return data of an api.
def api_data(
    funct: Callable[_P, Coroutine[Any, Any, dict[str, Any]]],
) -> Callable[_P, Coroutine[Any, Any, Any]]:
    """Return data of an api."""

    async def _wrapper(*argv: _P.args, **kwargs: _P.kwargs) -> Any:
        """Wrap function."""
        data = await funct(*argv, **kwargs)
        if data["result"] == "ok":
            return data["data"]
        raise HassioAPIError(data["message"])

    return _wrapper
Create response header.
def _response_header(response: aiohttp.ClientResponse, path: str) -> dict[str, str]:
    """Create response header."""
    headers = {
        name: value
        for name, value in response.headers.items()
        if name not in RESPONSE_HEADERS_FILTER
    }
    if NO_STORE.match(path):
        headers[CACHE_CONTROL] = "no-store, max-age=0"
    return headers
Return timeout for a URL path.
def _get_timeout(path: str) -> ClientTimeout:
    """Return timeout for a URL path."""
    if NO_TIMEOUT.match(path):
        return ClientTimeout(connect=10, total=None)
    return ClientTimeout(connect=10, total=300)
Return if we should compress a response.
def should_compress(content_type: str) -> bool:
    """Return if we should compress a response."""
    if content_type.startswith("image/"):
        return "svg" in content_type
    if content_type.startswith("application/"):
        return (
            "json" in content_type
            or "xml" in content_type
            or "javascript" in content_type
        )
    return not content_type.startswith(("video/", "audio/", "font/"))
Ingress setup.
def async_setup_ingress_view(hass: HomeAssistant, host: str) -> None:
    """Ingress setup."""
    websession = async_get_clientsession(hass)

    hassio_ingress = HassIOIngress(host, websession)
    hass.http.register_view(hassio_ingress)
Create X-Forwarded-For header.
def _forwarded_for_header(forward_for: str | None, peer_name: str) -> str:
    """Create X-Forwarded-For header."""
    connected_ip = ip_address(peer_name)
    return f"{forward_for}, {connected_ip!s}" if forward_for else f"{connected_ip!s}"
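A quick illustration of the header chaining; the IP values below are made up for the example:

from ipaddress import ip_address

def forwarded_for(forward_for, peer_name):
    connected_ip = ip_address(peer_name)
    return f"{forward_for}, {connected_ip!s}" if forward_for else f"{connected_ip!s}"

# The new hop is appended to an existing chain; otherwise the peer starts the chain.
assert forwarded_for("203.0.113.5", "172.30.32.2") == "203.0.113.5, 172.30.32.2"
assert forwarded_for(None, "172.30.32.2") == "172.30.32.2"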
Create initial header.
def _init_header(request: web.Request, token: str) -> CIMultiDict | dict[str, str]:
    """Create initial header."""
    headers = {
        name: value
        for name, value in request.headers.items()
        if name not in INIT_HEADERS_FILTER
    }

    # Ingress information
    headers[X_HASS_SOURCE] = "core.ingress"
    headers[X_INGRESS_PATH] = f"/api/hassio_ingress/{token}"

    # Set X-Forwarded-For
    forward_for = request.headers.get(hdrs.X_FORWARDED_FOR)
    assert request.transport
    if (peername := request.transport.get_extra_info("peername")) is None:
        _LOGGER.error("Can't set forward_for header, missing peername")
        raise HTTPBadRequest

    headers[hdrs.X_FORWARDED_FOR] = _forwarded_for_header(forward_for, peername[0])

    # Set X-Forwarded-Host
    if not (forward_host := request.headers.get(hdrs.X_FORWARDED_HOST)):
        forward_host = request.host
    headers[hdrs.X_FORWARDED_HOST] = forward_host

    # Set X-Forwarded-Proto
    forward_proto = request.headers.get(hdrs.X_FORWARDED_PROTO)
    if not forward_proto:
        forward_proto = request.scheme
    headers[hdrs.X_FORWARDED_PROTO] = forward_proto

    return headers
Create response header.
def _response_header(response: aiohttp.ClientResponse) -> dict[str, str]:
    """Create response header."""
    return {
        name: value
        for name, value in response.headers.items()
        if name not in RESPONSE_HEADERS_FILTER
    }
Return True if request is a websocket.
def _is_websocket(request: web.Request) -> bool:
    """Return True if request is a websocket."""
    headers = request.headers
    return bool(
        "upgrade" in headers.get(hdrs.CONNECTION, "").lower()
        and headers.get(hdrs.UPGRADE, "").lower() == "websocket"
    )
Register system health callbacks.
def async_register(
    hass: HomeAssistant, register: system_health.SystemHealthRegistration
) -> None:
    """Register system health callbacks."""
    register.async_register_info(system_health_info)
Set up the websocket API.
def async_load_websocket_api(hass: HomeAssistant) -> None:
    """Set up the websocket API."""
    websocket_api.async_register_command(hass, websocket_supervisor_event)
    websocket_api.async_register_command(hass, websocket_supervisor_api)
    websocket_api.async_register_command(hass, websocket_subscribe)
Subscribe to supervisor events.
def websocket_subscribe(
    hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
) -> None:
    """Subscribe to supervisor events."""

    @callback
    def forward_messages(data: dict[str, str]) -> None:
        """Forward events to websocket."""
        connection.send_message(websocket_api.event_message(msg[WS_ID], data))

    connection.subscriptions[msg[WS_ID]] = async_dispatcher_connect(
        hass, EVENT_SUPERVISOR_EVENT, forward_messages
    )
    connection.send_message(websocket_api.result_message(msg[WS_ID]))
Publish events from the Supervisor.
def websocket_supervisor_event(
    hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
) -> None:
    """Publish events from the Supervisor."""
    connection.send_result(msg[WS_ID])
    async_dispatcher_send(hass, EVENT_SUPERVISOR_EVENT, msg[ATTR_DATA])
Validate value is a valid addon slug.
def valid_addon(value: Any) -> str:
    """Validate value is a valid addon slug."""
    value = VALID_ADDON_SLUG(value)

    hass: HomeAssistant | None = None
    with suppress(HomeAssistantError):
        hass = async_get_hass()

    if hass and (addons := get_addons_info(hass)) is not None and value not in addons:
        raise vol.Invalid("Not a valid add-on slug")
    return value
Return hostname of add-on.
def hostname_from_addon_slug(addon_slug: str) -> str:
    """Return hostname of add-on."""
    return addon_slug.replace("_", "-")
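Add-on slugs may contain underscores, which are not valid in DNS hostnames, hence the replacement. For example (the slug is illustrative):

assert "core_mosquitto".replace("_", "-") == "core-mosquitto"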
Return true if Hass.io is loaded. Async friendly.
def is_hassio(hass: HomeAssistant) -> bool:
    """Return true if Hass.io is loaded.

    Async friendly.
    """
    return DOMAIN in hass.config.components
Return the supervisor ip address.
def get_supervisor_ip() -> str | None:
    """Return the supervisor ip address."""
    if "SUPERVISOR" not in os.environ:
        return None
    return os.environ["SUPERVISOR"].partition(":")[0]
Set up the HaveIBeenPwned sensor.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the HaveIBeenPwned sensor."""
    emails = config[CONF_EMAIL]
    api_key = config[CONF_API_KEY]
    data = HaveIBeenPwnedData(emails, api_key)

    add_entities(HaveIBeenPwnedSensor(data, email) for email in emails)
Set up the HDDTemp sensor.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the HDDTemp sensor."""
    name = config.get(CONF_NAME)
    host = config.get(CONF_HOST)
    port = config.get(CONF_PORT)
    disks = config.get(CONF_DISKS)

    hddtemp = HddTempData(host, port)
    hddtemp.update()

    if not disks:
        disks = [next(iter(hddtemp.data)).split("|")[0]]

    add_entities((HddTempSensor(name, disk, hddtemp) for disk in disks), True)
Find and return HDMI devices as media players.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Find and return HDMI devices as media players."""
    if discovery_info and ATTR_NEW in discovery_info:
        _LOGGER.debug("Setting up HDMI devices %s", discovery_info[ATTR_NEW])
        entities = []
        for device in discovery_info[ATTR_NEW]:
            hdmi_device = hass.data[DOMAIN][device]
            entities.append(CecPlayerEntity(hdmi_device, hdmi_device.logical_address))
        add_entities(entities, True)
Find and return HDMI devices as switches.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Find and return HDMI devices as switches."""
    if discovery_info and ATTR_NEW in discovery_info:
        _LOGGER.info("Setting up HDMI devices %s", discovery_info[ATTR_NEW])
        entities = []
        for device in discovery_info[ATTR_NEW]:
            hdmi_device = hass.data[DOMAIN][device]
            entities.append(CecSwitchEntity(hdmi_device, hdmi_device.logical_address))
        add_entities(entities, True)
Right-pad a physical address.
def pad_physical_address(addr):
    """Right-pad a physical address."""
    return addr + [0] * (4 - len(addr))
Parse configuration device mapping.
def parse_mapping(mapping, parents=None):
    """Parse configuration device mapping."""
    if parents is None:
        parents = []
    for addr, val in mapping.items():
        if isinstance(addr, (str,)) and isinstance(val, (str,)):
            yield (addr, PhysicalAddress(val))
        else:
            cur = [*parents, addr]
            if isinstance(val, dict):
                yield from parse_mapping(val, cur)
            elif isinstance(val, str):
                yield (val, pad_physical_address(cur))
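A self-contained sketch of how the recursion flattens a nested port mapping. PhysicalAddress is left out here (strings and padded lists stand in for it), and the config values are hypothetical:

def pad(addr):
    return addr + [0] * (4 - len(addr))

def parse_mapping(mapping, parents=None):
    parents = parents or []
    for addr, val in mapping.items():
        if isinstance(addr, str) and isinstance(val, str):
            yield (addr, val)  # direct "name: physical address" entry
        else:
            cur = [*parents, addr]
            if isinstance(val, dict):
                yield from parse_mapping(val, cur)  # descend one HDMI port level
            elif isinstance(val, str):
                yield (val, pad(cur))  # leaf: device name at this nested port path

config = {1: {1: "TV", 2: {1: "Receiver"}}, "Kodi": "2.0.0.0"}
print(list(parse_mapping(config)))
# [('TV', [1, 1, 0, 0]), ('Receiver', [1, 2, 1, 0]), ('Kodi', '2.0.0.0')]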
Set up the CEC capability.
def setup(hass: HomeAssistant, base_config: ConfigType) -> bool:  # noqa: C901
    """Set up the CEC capability."""
    hass.data[DOMAIN] = {}

    # Parse configuration into a dict of device name to physical address
    # represented as a list of four elements.
    device_aliases = {}
    devices = base_config[DOMAIN].get(CONF_DEVICES, {})
    _LOGGER.debug("Parsing config %s", devices)
    device_aliases.update(parse_mapping(devices))
    _LOGGER.debug("Parsed devices: %s", device_aliases)

    platform = base_config[DOMAIN].get(CONF_PLATFORM, SWITCH)

    loop = (
        # Create own thread if more than 1 CPU
        hass.loop if multiprocessing.cpu_count() < 2 else None
    )
    host = base_config[DOMAIN].get(CONF_HOST)
    display_name = base_config[DOMAIN].get(CONF_DISPLAY_NAME, DEFAULT_DISPLAY_NAME)
    if host:
        adapter = TcpAdapter(host, name=display_name, activate_source=False)
    else:
        adapter = CecAdapter(name=display_name[:12], activate_source=False)
    hdmi_network = HDMINetwork(adapter, loop=loop)

    def _adapter_watchdog(now=None):
        _LOGGER.debug("Reached _adapter_watchdog")
        event.call_later(hass, WATCHDOG_INTERVAL, _adapter_watchdog_job)
        if not adapter.initialized:
            _LOGGER.info("Adapter not initialized; Trying to restart")
            hass.bus.fire(EVENT_HDMI_CEC_UNAVAILABLE)
            adapter.init()

    _adapter_watchdog_job = HassJob(_adapter_watchdog, cancel_on_shutdown=True)

    @callback
    def _async_initialized_callback(*_: Any):
        """Add watchdog on initialization."""
        return event.async_call_later(hass, WATCHDOG_INTERVAL, _adapter_watchdog_job)

    hdmi_network.set_initialized_callback(_async_initialized_callback)

    def _volume(call: ServiceCall) -> None:
        """Increase/decrease volume and mute/unmute system."""
        mute_key_mapping = {
            ATTR_TOGGLE: KEY_MUTE_TOGGLE,
            ATTR_ON: KEY_MUTE_ON,
            ATTR_OFF: KEY_MUTE_OFF,
        }
        for cmd, att in call.data.items():
            if cmd == CMD_UP:
                _process_volume(KEY_VOLUME_UP, att)
            elif cmd == CMD_DOWN:
                _process_volume(KEY_VOLUME_DOWN, att)
            elif cmd == CMD_MUTE:
                hdmi_network.send_command(
                    KeyPressCommand(mute_key_mapping[att], dst=ADDR_AUDIOSYSTEM)
                )
                hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))
                _LOGGER.info("Audio muted")
            else:
                _LOGGER.warning("Unknown command %s", cmd)

    def _process_volume(cmd, att):
        if isinstance(att, (str,)):
            att = att.strip()
        if att == CMD_PRESS:
            hdmi_network.send_command(KeyPressCommand(cmd, dst=ADDR_AUDIOSYSTEM))
        elif att == CMD_RELEASE:
            hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))
        else:
            att = 1 if att == "" else int(att)
            for _ in range(att):
                hdmi_network.send_command(KeyPressCommand(cmd, dst=ADDR_AUDIOSYSTEM))
                hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))

    def _tx(call: ServiceCall) -> None:
        """Send CEC command."""
        data = call.data
        if ATTR_RAW in data:
            command = CecCommand(data[ATTR_RAW])
        else:
            src = data.get(ATTR_SRC, ADDR_UNREGISTERED)
            dst = data.get(ATTR_DST, ADDR_BROADCAST)
            if ATTR_CMD in data:
                cmd = data[ATTR_CMD]
            else:
                _LOGGER.error("Attribute 'cmd' is missing")
                return
            if ATTR_ATT in data:
                if isinstance(data[ATTR_ATT], (list,)):
                    att = data[ATTR_ATT]
                else:
                    att = reduce(lambda x, y: f"{x}:{y:x}", data[ATTR_ATT])
            else:
                att = ""
            command = CecCommand(cmd, dst, src, att)
        hdmi_network.send_command(command)

    def _standby(call: ServiceCall) -> None:
        hdmi_network.standby()

    def _power_on(call: ServiceCall) -> None:
        hdmi_network.power_on()

    def _select_device(call: ServiceCall) -> None:
        """Select the active device."""
        if not (addr := call.data[ATTR_DEVICE]):
            _LOGGER.error("Device not found: %s", call.data[ATTR_DEVICE])
            return
        if addr in device_aliases:
            addr = device_aliases[addr]
        else:
            entity = hass.states.get(addr)
            _LOGGER.debug("Selecting entity %s", entity)
            if entity is not None:
                addr = entity.attributes["physical_address"]
                _LOGGER.debug("Address acquired: %s", addr)
                if addr is None:
                    _LOGGER.error(
                        "Device %s has no physical address", call.data[ATTR_DEVICE]
                    )
                    return
        if not isinstance(addr, (PhysicalAddress,)):
            addr = PhysicalAddress(addr)
        hdmi_network.active_source(addr)
        _LOGGER.info("Selected %s (%s)", call.data[ATTR_DEVICE], addr)

    def _update(call: ServiceCall) -> None:
        """Update if device update is needed.

        Called by service, requests CEC network to update data.
        """
        hdmi_network.scan()

    def _new_device(device):
        """Handle new devices which are detected by HDMI network."""
        key = f"{DOMAIN}.{device.name}"
        hass.data[DOMAIN][key] = device
        ent_platform = base_config[DOMAIN][CONF_TYPES].get(key, platform)
        discovery.load_platform(
            hass,
            ent_platform,
            DOMAIN,
            discovered={ATTR_NEW: [key]},
            hass_config=base_config,
        )

    def _shutdown(call):
        hdmi_network.stop()

    def _start_cec(callback_event):
        """Register services and start HDMI network to watch for devices."""
        hass.services.register(
            DOMAIN, SERVICE_SEND_COMMAND, _tx, SERVICE_SEND_COMMAND_SCHEMA
        )
        hass.services.register(
            DOMAIN, SERVICE_VOLUME, _volume, schema=SERVICE_VOLUME_SCHEMA
        )
        hass.services.register(
            DOMAIN,
            SERVICE_UPDATE_DEVICES,
            _update,
            schema=SERVICE_UPDATE_DEVICES_SCHEMA,
        )
        hass.services.register(DOMAIN, SERVICE_POWER_ON, _power_on)
        hass.services.register(DOMAIN, SERVICE_STANDBY, _standby)
        hass.services.register(DOMAIN, SERVICE_SELECT_DEVICE, _select_device)

        hdmi_network.set_new_device_callback(_new_device)
        hdmi_network.start()

    hass.bus.listen_once(EVENT_HOMEASSISTANT_START, _start_cec)
    hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown)

    return True
Set up the heatmiser thermostat.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the heatmiser thermostat."""
    heatmiser_v3_thermostat = heatmiser.HeatmiserThermostat

    host = config[CONF_HOST]
    port = config[CONF_PORT]

    thermostats = config[CONF_THERMOSTATS]

    uh1_hub = connection.HeatmiserUH1(host, port)

    add_entities(
        [
            HeatmiserV3Thermostat(heatmiser_v3_thermostat, thermostat, uh1_hub)
            for thermostat in thermostats
        ],
        True,
    )
Format the title for config entries.
def format_title(host: str) -> str:
    """Format the title for config entries."""
    return f"Controller ({host})"
Return decorator that logs command failure.
def log_command_error(command: str) -> Callable[[_FuncType[_P]], _ReturnFuncType[_P]]:
    """Return decorator that logs command failure."""

    def decorator(func: _FuncType[_P]) -> _ReturnFuncType[_P]:
        @wraps(func)
        async def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> None:
            try:
                await func(*args, **kwargs)
            except (HeosError, ValueError) as ex:
                _LOGGER.error("Unable to %s: %s", command, ex)

        return wrapper

    return decorator
Register HEOS services.
def register(hass: HomeAssistant, controller: Heos):
    """Register HEOS services."""
    hass.services.async_register(
        DOMAIN,
        SERVICE_SIGN_IN,
        functools.partial(_sign_in_handler, controller),
        schema=HEOS_SIGN_IN_SCHEMA,
    )
    hass.services.async_register(
        DOMAIN,
        SERVICE_SIGN_OUT,
        functools.partial(_sign_out_handler, controller),
        schema=HEOS_SIGN_OUT_SCHEMA,
    )
Unregister HEOS services.
def remove(hass: HomeAssistant):
    """Unregister HEOS services."""
    hass.services.async_remove(DOMAIN, SERVICE_SIGN_IN)
    hass.services.async_remove(DOMAIN, SERVICE_SIGN_OUT)
Get a populated schema or default.
def get_user_step_schema(data: dict[str, Any]) -> vol.Schema:
    """Get a populated schema or default."""
    travel_mode = data.get(CONF_MODE, TRAVEL_MODE_CAR)
    if travel_mode == "publicTransportTimeTable":
        # Map the legacy public transport mode onto the supported one so the
        # default below is always a member of TRAVEL_MODES.
        travel_mode = TRAVEL_MODE_PUBLIC
    return vol.Schema(
        {
            vol.Optional(
                CONF_NAME, default=data.get(CONF_NAME, DEFAULT_NAME)
            ): cv.string,
            vol.Required(CONF_API_KEY, default=data.get(CONF_API_KEY)): cv.string,
            vol.Optional(CONF_MODE, default=travel_mode): vol.In(TRAVEL_MODES),
        }
    )
Prepare parameters for the HERE api.
def prepare_parameters(
    hass: HomeAssistant,
    config: HERETravelTimeConfig,
) -> tuple[list[str], list[str], str | None, str | None]:
    """Prepare parameters for the HERE api."""

    def _from_entity_id(entity_id: str) -> list[str]:
        coordinates = find_coordinates(hass, entity_id)
        if coordinates is None:
            raise UpdateFailed(f"No coordinates found for {entity_id}")
        if coordinates is entity_id:
            raise UpdateFailed(f"Could not find entity {entity_id}")
        try:
            formatted_coordinates = coordinates.split(",")
            vol.Schema(cv.gps(formatted_coordinates))
        except (AttributeError, vol.ExactSequenceInvalid) as ex:
            raise UpdateFailed(
                f"{entity_id} does not have valid coordinates: {coordinates}"
            ) from ex
        return formatted_coordinates

    # Destination
    if config.destination_entity_id is not None:
        destination = _from_entity_id(config.destination_entity_id)
    else:
        destination = [
            str(config.destination_latitude),
            str(config.destination_longitude),
        ]

    # Origin
    if config.origin_entity_id is not None:
        origin = _from_entity_id(config.origin_entity_id)
    else:
        origin = [
            str(config.origin_latitude),
            str(config.origin_longitude),
        ]

    # Arrival/Departure
    arrival: str | None = None
    departure: str | None = None
    if config.arrival is not None:
        arrival = next_datetime(config.arrival).isoformat()
    if config.departure is not None:
        departure = next_datetime(config.departure).isoformat()

    return (origin, destination, arrival, departure)
Build a hass frontend ready string out of the attributions.
def build_hass_attribution(sections: list[dict[str, Any]]) -> str | None:
    """Build a hass frontend ready string out of the attributions."""
    relevant_attributions = []
    for section in sections:
        if (attributions := section.get("attributions")) is not None:
            for attribution in attributions:
                if (href := attribution.get("href")) is not None:
                    relevant_attributions.append(f"{href}")
                if (text := attribution.get("text")) is not None:
                    relevant_attributions.append(text)
    if len(relevant_attributions) > 0:
        return ",".join(relevant_attributions)
    return None
Take a time like 08:00:00 and combine it with the current date.
def next_datetime(simple_time: time) -> datetime:
    """Take a time like 08:00:00 and combine it with the current date."""
    combined = datetime.combine(dt_util.start_of_local_day(), simple_time)
    if combined < datetime.now():
        combined = combined + timedelta(days=1)
    return combined
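The intent, shown standalone: a configured time-of-day is mapped to its next occurrence, today if it is still ahead, otherwise tomorrow. The sketch below uses the local system clock directly instead of Home Assistant's dt_util, which is an assumption of this example.

from datetime import datetime, time, timedelta

def next_occurrence(simple_time: time) -> datetime:
    today = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
    combined = datetime.combine(today, simple_time)
    if combined < datetime.now():
        combined += timedelta(days=1)  # already passed today, use tomorrow
    return combined

print(next_occurrence(time(8, 0)))  # next 08:00, today or tomorrow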
Construct SensorEntityDescriptions.
def sensor_descriptions(travel_mode: str) -> tuple[SensorEntityDescription, ...]:
    """Construct SensorEntityDescriptions."""
    return (
        SensorEntityDescription(
            translation_key="duration",
            icon=ICONS.get(travel_mode, ICON_CAR),
            key=ATTR_DURATION,
            state_class=SensorStateClass.MEASUREMENT,
            native_unit_of_measurement=UnitOfTime.MINUTES,
        ),
        SensorEntityDescription(
            translation_key="duration_in_traffic",
            icon=ICONS.get(travel_mode, ICON_CAR),
            key=ATTR_DURATION_IN_TRAFFIC,
            state_class=SensorStateClass.MEASUREMENT,
            native_unit_of_measurement=UnitOfTime.MINUTES,
        ),
        SensorEntityDescription(
            translation_key="distance",
            icon=ICONS.get(travel_mode, ICON_CAR),
            key=ATTR_DISTANCE,
            state_class=SensorStateClass.MEASUREMENT,
            device_class=SensorDeviceClass.DISTANCE,
            native_unit_of_measurement=UnitOfLength.KILOMETERS,
        ),
    )
Set up the Hikvision binary sensor devices.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Hikvision binary sensor devices."""
    name = config.get(CONF_NAME)
    host = config[CONF_HOST]
    port = config[CONF_PORT]
    username = config[CONF_USERNAME]
    password = config[CONF_PASSWORD]

    customize = config[CONF_CUSTOMIZE]

    protocol = "https" if config[CONF_SSL] else "http"

    url = f"{protocol}://{host}"

    data = HikvisionData(hass, url, port, name, username, password)

    if data.sensors is None:
        _LOGGER.error("Hikvision event stream has no data, unable to set up")
        return

    entities = []

    for sensor, channel_list in data.sensors.items():
        for channel in channel_list:
            # Build sensor name, then parse customize config.
            if data.type == "NVR":
                sensor_name = f"{sensor.replace(' ', '_')}_{channel[1]}"
            else:
                sensor_name = sensor.replace(" ", "_")

            custom = customize.get(sensor_name.lower(), {})
            ignore = custom.get(CONF_IGNORED)
            delay = custom.get(CONF_DELAY)

            _LOGGER.debug(
                "Entity: %s - %s, Options - Ignore: %s, Delay: %s",
                data.name,
                sensor_name,
                ignore,
                delay,
            )
            if not ignore:
                entities.append(
                    HikvisionBinarySensor(hass, sensor, channel[1], data, delay)
                )

    add_entities(entities)
Set up Hikvision camera.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up Hikvision camera."""
    host = config.get(CONF_HOST)
    port = config.get(CONF_PORT)
    name = config.get(CONF_NAME)
    username = config.get(CONF_USERNAME)
    password = config.get(CONF_PASSWORD)

    try:
        hikvision_cam = hikvision.api.CreateDevice(
            host, port=port, username=username, password=password, is_https=False
        )
    except MissingParamError as param_err:
        _LOGGING.error("Missing required param: %s", param_err)
        return
    except HikvisionError as conn_err:
        _LOGGING.error("Unable to connect: %s", conn_err)
        return

    add_entities([HikvisionMotionSwitch(name, hikvision_cam)])
Validate that provided value is a valid IP address.
def coerce_ip(value):
    """Validate that provided value is a valid IP address."""
    if not value:
        raise vol.Invalid("Must define an IP address")
    try:
        ipaddress.IPv4Network(value)
    except ValueError as err:
        raise vol.Invalid("Not a valid IP address") from err
    return value
Check the state machine to see if entities have changed since start time.
def entities_may_have_state_changes_after(
    hass: HomeAssistant, entity_ids: Iterable, start_time: dt, no_attributes: bool
) -> bool:
    """Check the state machine to see if entities have changed since start time."""
    for entity_id in entity_ids:
        state = hass.states.get(entity_id)
        if state is None:
            return True

        state_time = state.last_changed if no_attributes else state.last_updated
        if state_time > start_time:
            return True

    return False
Check if the recorder has any runs after a specific time.
def has_recorder_run_after(hass: HomeAssistant, run_time: dt) -> bool:
    """Check if the recorder has any runs after a specific time."""
    return run_time >= process_timestamp(
        get_instance(hass).recorder_runs_manager.first.start
    )
Set up the history websocket API.
def async_setup(hass: HomeAssistant) -> None:
    """Set up the history websocket API."""
    websocket_api.async_register_command(hass, ws_get_history_during_period)
    websocket_api.async_register_command(hass, ws_stream)
Fetch history significant_states and convert them to json in the executor.
def _ws_get_significant_states(
    hass: HomeAssistant,
    msg_id: int,
    start_time: dt,
    end_time: dt | None,
    entity_ids: list[str] | None,
    include_start_time_state: bool,
    significant_changes_only: bool,
    minimal_response: bool,
    no_attributes: bool,
) -> bytes:
    """Fetch history significant_states and convert them to json in the executor."""
    return json_bytes(
        messages.result_message(
            msg_id,
            history.get_significant_states(
                hass,
                start_time,
                end_time,
                entity_ids,
                None,
                include_start_time_state,
                significant_changes_only,
                minimal_response,
                no_attributes,
                True,
            ),
        )
    )
Generate a history stream message response.
def _generate_stream_message(
    states: dict[str, list[dict[str, Any]]],
    start_day: dt,
    end_day: dt,
) -> dict[str, Any]:
    """Generate a history stream message response."""
    return {
        "states": states,
        "start_time": start_day.timestamp(),
        "end_time": end_day.timestamp(),
    }
Send an empty response when we know all results are filtered away.
def _async_send_empty_response(
    connection: ActiveConnection, msg_id: int, start_time: dt, end_time: dt | None
) -> None:
    """Send an empty response when we know all results are filtered away."""
    connection.send_result(msg_id)
    stream_end_time = end_time or dt_util.utcnow()
    connection.send_message(
        _generate_websocket_response(msg_id, start_time, stream_end_time, {})
    )
Generate a websocket response.
def _generate_websocket_response(
    msg_id: int,
    start_time: dt,
    end_time: dt,
    states: dict[str, list[dict[str, Any]]],
) -> bytes:
    """Generate a websocket response."""
    return json_bytes(
        messages.event_message(
            msg_id, _generate_stream_message(states, start_time, end_time)
        )
    )
Generate a historical response.
def _generate_historical_response(
    hass: HomeAssistant,
    msg_id: int,
    start_time: dt,
    end_time: dt,
    entity_ids: list[str] | None,
    include_start_time_state: bool,
    significant_changes_only: bool,
    minimal_response: bool,
    no_attributes: bool,
    send_empty: bool,
) -> tuple[float, dt | None, bytes | None]:
    """Generate a historical response."""
    states = cast(
        dict[str, list[dict[str, Any]]],
        history.get_significant_states(
            hass,
            start_time,
            end_time,
            entity_ids,
            None,
            include_start_time_state,
            significant_changes_only,
            minimal_response,
            no_attributes,
            True,
        ),
    )
    last_time_ts = 0.0
    for state_list in states.values():
        if (
            state_list
            and (state_last_time := state_list[-1][COMPRESSED_STATE_LAST_UPDATED])
            > last_time_ts
        ):
            last_time_ts = cast(float, state_last_time)

    if last_time_ts == 0:
        # If we did not send any states ever, we need to send an empty response
        # so the websocket client knows it should render/process/consume the
        # data.
        if not send_empty:
            return last_time_ts, None, None
        last_time_dt = end_time
    else:
        last_time_dt = dt_util.utc_from_timestamp(last_time_ts)

    return (
        last_time_ts,
        last_time_dt,
        _generate_websocket_response(msg_id, start_time, last_time_dt, states),
    )
Convert a state to a compressed state.
def _history_compressed_state(state: State, no_attributes: bool) -> dict[str, Any]:
    """Convert a state to a compressed state."""
    comp_state: dict[str, Any] = {COMPRESSED_STATE_STATE: state.state}
    if not no_attributes or state.domain in history.NEED_ATTRIBUTE_DOMAINS:
        comp_state[COMPRESSED_STATE_ATTRIBUTES] = state.attributes
    comp_state[COMPRESSED_STATE_LAST_UPDATED] = state.last_updated_timestamp
    if state.last_changed != state.last_updated:
        comp_state[COMPRESSED_STATE_LAST_CHANGED] = state.last_changed_timestamp
    return comp_state
Convert events to a compressed states.
def _events_to_compressed_states(
    events: Iterable[Event], no_attributes: bool
) -> dict[str, list[dict[str, Any]]]:
    """Convert events to a compressed states."""
    states_by_entity_ids: dict[str, list[dict[str, Any]]] = {}
    for event in events:
        state: State = event.data["new_state"]
        entity_id: str = state.entity_id
        states_by_entity_ids.setdefault(entity_id, []).append(
            _history_compressed_state(state, no_attributes)
        )
    return states_by_entity_ids
Subscribe to events for the entities and devices or all. These are the events we need to listen for to do the live history stream.
def _async_subscribe_events(
    hass: HomeAssistant,
    subscriptions: list[CALLBACK_TYPE],
    target: Callable[[Event[Any]], None],
    entity_ids: list[str],
    significant_changes_only: bool,
    minimal_response: bool,
) -> None:
    """Subscribe to events for the entities and devices or all.

    These are the events we need to listen for to do the live history stream.
    """
    assert is_callback(target), "target must be a callback"

    @callback
    def _forward_state_events_filtered(event: Event[EventStateChangedData]) -> None:
        """Filter state events and forward them."""
        if (new_state := event.data["new_state"]) is None or (
            old_state := event.data["old_state"]
        ) is None:
            return
        if (
            (significant_changes_only or minimal_response)
            and new_state.state == old_state.state
            and new_state.domain not in history.SIGNIFICANT_DOMAINS
        ):
            return
        target(event)

    subscriptions.append(
        async_track_state_change_event(hass, entity_ids, _forward_state_events_filtered)
    )
Parse the templates and return the period.
def async_calculate_period(
    duration: datetime.timedelta | None,
    start_template: Template | None,
    end_template: Template | None,
) -> tuple[datetime.datetime, datetime.datetime]:
    """Parse the templates and return the period."""
    bounds: dict[str, datetime.datetime | None] = {
        DURATION_START: None,
        DURATION_END: None,
    }
    for bound, template in (
        (DURATION_START, start_template),
        (DURATION_END, end_template),
    ):
        # Parse start
        if template is None:
            continue
        try:
            rendered = template.async_render()
        except (TemplateError, TypeError) as ex:
            if ex.args and not ex.args[0].startswith(
                "UndefinedError: 'None' has no attribute"
            ):
                _LOGGER.error("Error parsing template for field %s", bound, exc_info=ex)
            raise
        if isinstance(rendered, str):
            bounds[bound] = dt_util.parse_datetime(rendered)
        if bounds[bound] is not None:
            continue
        try:
            bounds[bound] = dt_util.as_local(
                dt_util.utc_from_timestamp(math.floor(float(rendered)))
            )
        except ValueError as ex:
            raise ValueError(
                f"Parsing error: {bound} must be a datetime or a timestamp: {ex}"
            ) from ex

    start = bounds[DURATION_START]
    end = bounds[DURATION_END]

    # Calculate start or end using the duration
    if start is None:
        assert end is not None
        assert duration is not None
        start = end - duration
    if end is None:
        assert start is not None
        assert duration is not None
        end = start + duration

    return start, end
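Stripped of the template machinery, the completion step at the end is: given any two of start, end, and duration, derive the missing bound. A minimal sketch:

from datetime import datetime, timedelta

def complete_period(start, end, duration):
    """Return (start, end), filling whichever bound is missing from the duration."""
    if start is None:
        start = end - duration
    if end is None:
        end = start + duration
    return start, end

end = datetime(2024, 1, 1, 12, 0)
print(complete_period(None, end, timedelta(hours=1)))  # 11:00 to 12:00 on 2024-01-01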
Format the ratio of value / period duration.
def pretty_ratio(
    value: float, period: tuple[datetime.datetime, datetime.datetime]
) -> float:
    """Format the ratio of value / period duration."""
    if len(period) != 2 or period[0] == period[1]:
        return 0.0

    ratio = 100 * value / (period[1] - period[0]).total_seconds()
    return round(ratio, 1)
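Worked example: 45 minutes of tracked time over a 2-hour window is 2700 s over 7200 s, i.e. 37.5%.

from datetime import datetime, timedelta

start = datetime(2024, 1, 1, 0, 0)
period = (start, start + timedelta(hours=2))
value = 45 * 60  # seconds spent in the tracked state

ratio = round(100 * value / (period[1] - period[0]).total_seconds(), 1)
assert ratio == 37.5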
Calculate the floored value of a timestamp.
def floored_timestamp(incoming_dt: datetime.datetime) -> float:
    """Calculate the floored value of a timestamp."""
    return math.floor(dt_util.as_timestamp(incoming_dt))