Fetch events and convert them to json in the executor.
def _ws_formatted_get_events(
    msg_id: int,
    start_time: dt,
    end_time: dt,
    event_processor: EventProcessor,
) -> bytes:
    """Fetch events and convert them to json in the executor."""
    return json_bytes(
        messages.result_message(
            msg_id, event_processor.get_events(start_time, end_time)
        )
    )
Add an entry to the logbook.
def log_entry(
    hass: HomeAssistant,
    name: str,
    message: str,
    domain: str | None = None,
    entity_id: str | None = None,
    context: Context | None = None,
) -> None:
    """Add an entry to the logbook."""
    hass.add_job(async_log_entry, hass, name, message, domain, entity_id, context)
Add an entry to the logbook.
def async_log_entry(
    hass: HomeAssistant,
    name: str,
    message: str,
    domain: str | None = None,
    entity_id: str | None = None,
    context: Context | None = None,
) -> None:
    """Add an entry to the logbook."""
    data = {LOGBOOK_ENTRY_NAME: name, LOGBOOK_ENTRY_MESSAGE: message}
    if domain is not None:
        data[LOGBOOK_ENTRY_DOMAIN] = domain
    if entity_id is not None:
        data[LOGBOOK_ENTRY_ENTITY_ID] = entity_id
    hass.bus.async_fire(EVENT_LOGBOOK_ENTRY, data, context=context)
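A minimal usage sketch for the pair above (hypothetical names; assumes a running HomeAssistant instance called hass): log_entry schedules the work from a worker thread, while async_log_entry fires EVENT_LOGBOOK_ENTRY directly from the event loop.

def _log_watering_started(hass: HomeAssistant) -> None:
    # Hypothetical example entry; "Irrigation" and switch.lawn are made up.
    # The logbook renders this as "Irrigation started watering the lawn".
    async_log_entry(
        hass,
        name="Irrigation",
        message="started watering the lawn",
        domain="switch",
        entity_id="switch.lawn",
    )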
Process a logbook platform.
def _process_logbook_platform(hass: HomeAssistant, domain: str, platform: Any) -> None:
    """Process a logbook platform."""
    logbook_config: LogbookConfig = hass.data[DOMAIN]
    external_events = logbook_config.external_events

    @callback
    def _async_describe_event(
        domain: str,
        event_name: str,
        describe_callback: Callable[[LazyEventPartialState], dict[str, Any]],
    ) -> None:
        """Teach logbook how to describe a new event."""
        external_events[event_name] = (domain, describe_callback)

    platform.async_describe_events(hass, _async_describe_event)
Generate a logbook query for all entities.
def all_stmt(
    start_day: float,
    end_day: float,
    event_type_ids: tuple[int, ...],
    filters: Filters | None,
    context_id_bin: bytes | None = None,
) -> StatementLambdaElement:
    """Generate a logbook query for all entities."""
    stmt = lambda_stmt(
        lambda: select_events_without_states(start_day, end_day, event_type_ids)
    )
    if context_id_bin is not None:
        stmt += lambda s: s.where(Events.context_id_bin == context_id_bin).union_all(
            _states_query_for_context_id(start_day, end_day, context_id_bin),
        )
    elif filters and filters.has_config:
        stmt = stmt.add_criteria(
            lambda q: q.filter(filters.events_entity_filter()).union_all(
                _states_query_for_all(start_day, end_day).where(
                    filters.states_metadata_entity_filter()
                )
            ),
            track_on=[filters],
        )
    else:
        stmt += lambda s: s.union_all(_states_query_for_all(start_day, end_day))
    stmt += lambda s: s.order_by(Events.time_fired_ts)
    return stmt
Force mysql to use the right index on large selects.
def _apply_all_hints(sel: Select) -> Select:
    """Force mysql to use the right index on large selects."""
    return sel.with_hint(
        States, f"FORCE INDEX ({LAST_UPDATED_INDEX_TS})", dialect_name="mysql"
    ).with_hint(
        States, f"FORCE INDEX ({LAST_UPDATED_INDEX_TS})", dialect_name="mariadb"
    )
Generate the select for a context_id subquery.
def select_events_context_id_subquery(
    start_day: float,
    end_day: float,
    event_type_ids: tuple[int, ...],
) -> Select:
    """Generate the select for a context_id subquery."""
    return (
        select(Events.context_id_bin)
        .where((Events.time_fired_ts > start_day) & (Events.time_fired_ts < end_day))
        .where(Events.event_type_id.in_(event_type_ids))
        .outerjoin(EventTypes, (Events.event_type_id == EventTypes.event_type_id))
        .outerjoin(EventData, (Events.data_id == EventData.data_id))
    )
Generate an events query that marks rows as context_only. By marking them as context_only we know they are only for linking context ids and we can avoid processing them.
def select_events_context_only() -> Select:
    """Generate an events query that marks rows as context_only.

    By marking them as context_only we know they are only for
    linking context ids and we can avoid processing them.
    """
    return select(*EVENT_ROWS_NO_STATES, CONTEXT_ONLY)
Generate a states query that marks rows as context_only. By marking them as context_only we know they are only for linking context ids and we can avoid processing them.
def select_states_context_only() -> Select:
    """Generate a states query that marks rows as context_only.

    By marking them as context_only we know they are only for
    linking context ids and we can avoid processing them.
    """
    return select(
        *EVENT_COLUMNS_FOR_STATE_SELECT, *STATE_CONTEXT_ONLY_COLUMNS, CONTEXT_ONLY
    )
Generate an events select that does not join states.
def select_events_without_states(
    start_day: float, end_day: float, event_type_ids: tuple[int, ...]
) -> Select:
    """Generate an events select that does not join states."""
    return (
        select(*EVENT_ROWS_NO_STATES, NOT_CONTEXT_ONLY)
        .where((Events.time_fired_ts > start_day) & (Events.time_fired_ts < end_day))
        .where(Events.event_type_id.in_(event_type_ids))
        .outerjoin(EventTypes, (Events.event_type_id == EventTypes.event_type_id))
        .outerjoin(EventData, (Events.data_id == EventData.data_id))
    )
Generate a states select that formats the states table as event rows.
def select_states() -> Select:
    """Generate a states select that formats the states table as event rows."""
    return select(
        *EVENT_COLUMNS_FOR_STATE_SELECT,
        *STATE_COLUMNS,
        NOT_CONTEXT_ONLY,
    )
Filter states by time range. Filters states that do not have an old state or new state (added / removed). Filters states that are in a continuous domain with a UOM. Filters states that do not have matching last_updated_ts and last_changed_ts.
def apply_states_filters(sel: Select, start_day: float, end_day: float) -> Select:
    """Filter states by time range.

    Filters states that do not have an old state or new state (added / removed).
    Filters states that are in a continuous domain with a UOM.
    Filters states that do not have matching last_updated_ts and last_changed_ts.
    """
    return (
        sel.filter(
            (States.last_updated_ts > start_day) & (States.last_updated_ts < end_day)
        )
        .outerjoin(OLD_STATE, (States.old_state_id == OLD_STATE.state_id))
        .where(_missing_state_matcher())
        .where(_not_continuous_entity_matcher())
        .where(
            (States.last_updated_ts == States.last_changed_ts)
            | States.last_changed_ts.is_(None)
        )
        .outerjoin(
            StateAttributes, (States.attributes_id == StateAttributes.attributes_id)
        )
        .outerjoin(StatesMeta, (States.metadata_id == StatesMeta.metadata_id))
    )
Match non continuous entities.
def _not_continuous_entity_matcher() -> ColumnElement[bool]:
    """Match non continuous entities."""
    return sqlalchemy.or_(
        # First exclude domains that may be continuous
        _not_possible_continuous_domain_matcher(),
        # But let in the entities in the possible continuous domains
        # that are not actually continuous sensors because they lack a UOM
        sqlalchemy.and_(
            # Call the matcher like its siblings; and_() expects column
            # elements, not a bare function reference
            _conditionally_continuous_domain_matcher(), _not_uom_attributes_matcher()
        ).self_group(),
    )
Match not continuous domains. This matches domains that are always considered continuous and domains that are conditionally continuous (if they have a UOM).
def _not_possible_continuous_domain_matcher() -> ColumnElement[bool]:
    """Match not continuous domains.

    This matches domains that are always considered continuous and
    domains that are conditionally continuous (if they have a UOM).
    """
    return sqlalchemy.and_(
        *[
            ~StatesMeta.entity_id.like(entity_domain)
            for entity_domain in (
                *ALWAYS_CONTINUOUS_ENTITY_ID_LIKE,
                *CONDITIONALLY_CONTINUOUS_ENTITY_ID_LIKE,
            )
        ],
    ).self_group()
Match conditionally continuous domains. This matches domains that are only considered continuous if a UOM is set.
def _conditionally_continuous_domain_matcher() -> ColumnElement[bool]:
    """Match conditionally continuous domains.

    This matches domains that are only considered continuous if a UOM is set.
    """
    return sqlalchemy.or_(
        *[
            StatesMeta.entity_id.like(entity_domain)
            for entity_domain in CONDITIONALLY_CONTINUOUS_ENTITY_ID_LIKE
        ],
    ).self_group()
Prefilter ATTR_UNIT_OF_MEASUREMENT as it's much faster in SQL.
def _not_uom_attributes_matcher() -> BooleanClauseList:
    """Prefilter ATTR_UNIT_OF_MEASUREMENT as it's much faster in SQL."""
    return ~StateAttributes.shared_attrs.like(
        UNIT_OF_MEASUREMENT_JSON_LIKE
    ) | ~States.attributes.like(UNIT_OF_MEASUREMENT_JSON_LIKE)
Force mysql to use the right index on large context_id selects.
def apply_states_context_hints(sel: Select) -> Select:
    """Force mysql to use the right index on large context_id selects."""
    return sel.with_hint(
        States, f"FORCE INDEX ({STATES_CONTEXT_ID_BIN_INDEX})", dialect_name="mysql"
    ).with_hint(
        States, f"FORCE INDEX ({STATES_CONTEXT_ID_BIN_INDEX})", dialect_name="mariadb"
    )
Force mysql to use the right index on large context_id selects.
def apply_events_context_hints(sel: Select) -> Select:
    """Force mysql to use the right index on large context_id selects."""
    return sel.with_hint(
        Events, f"FORCE INDEX ({EVENTS_CONTEXT_ID_BIN_INDEX})", dialect_name="mysql"
    ).with_hint(
        Events, f"FORCE INDEX ({EVENTS_CONTEXT_ID_BIN_INDEX})", dialect_name="mariadb"
    )
Generate a subquery to find context ids for multiple devices.
def _select_device_id_context_ids_sub_query(
    start_day: float,
    end_day: float,
    event_type_ids: tuple[int, ...],
    json_quotable_device_ids: list[str],
) -> Select:
    """Generate a subquery to find context ids for multiple devices."""
    inner = (
        select_events_context_id_subquery(start_day, end_day, event_type_ids)
        .where(apply_event_device_id_matchers(json_quotable_device_ids))
        .subquery()
    )
    return select(inner.c.context_id_bin).group_by(inner.c.context_id_bin)
Generate a CTE to find the device context ids and a query to find linked rows.
def _apply_devices_context_union(
    sel: Select,
    start_day: float,
    end_day: float,
    event_type_ids: tuple[int, ...],
    json_quotable_device_ids: list[str],
) -> CompoundSelect:
    """Generate a CTE to find the device context ids and a query to find linked rows."""
    devices_cte: CTE = _select_device_id_context_ids_sub_query(
        start_day,
        end_day,
        event_type_ids,
        json_quotable_device_ids,
    ).cte()
    return sel.union_all(
        apply_events_context_hints(
            select_events_context_only()
            .select_from(devices_cte)
            .outerjoin(Events, devices_cte.c.context_id_bin == Events.context_id_bin)
            .outerjoin(EventTypes, (Events.event_type_id == EventTypes.event_type_id))
            .outerjoin(EventData, (Events.data_id == EventData.data_id)),
        ),
        apply_states_context_hints(
            select_states_context_only()
            .select_from(devices_cte)
            .outerjoin(States, devices_cte.c.context_id_bin == States.context_id_bin)
            .outerjoin(StatesMeta, (States.metadata_id == StatesMeta.metadata_id))
        ),
    )
Generate a logbook query for multiple devices.
def devices_stmt(
    start_day: float,
    end_day: float,
    event_type_ids: tuple[int, ...],
    json_quotable_device_ids: list[str],
) -> StatementLambdaElement:
    """Generate a logbook query for multiple devices."""
    return lambda_stmt(
        lambda: _apply_devices_context_union(
            select_events_without_states(start_day, end_day, event_type_ids).where(
                apply_event_device_id_matchers(json_quotable_device_ids)
            ),
            start_day,
            end_day,
            event_type_ids,
            json_quotable_device_ids,
        ).order_by(Events.time_fired_ts)
    )
Create matchers for the device_ids in the event_data.
def apply_event_device_id_matchers(
    json_quotable_device_ids: Iterable[str],
) -> BooleanClauseList:
    """Create matchers for the device_ids in the event_data."""
    return DEVICE_ID_IN_EVENT.is_not(None) & sqlalchemy.cast(
        DEVICE_ID_IN_EVENT, sqlalchemy.Text()
    ).in_(json_quotable_device_ids)
Generate a subquery to find context ids for multiple entities.
def _select_entities_context_ids_sub_query(
    start_day: float,
    end_day: float,
    event_type_ids: tuple[int, ...],
    states_metadata_ids: Collection[int],
    json_quoted_entity_ids: list[str],
) -> Select:
    """Generate a subquery to find context ids for multiple entities."""
    union = union_all(
        select_events_context_id_subquery(start_day, end_day, event_type_ids).where(
            apply_event_entity_id_matchers(json_quoted_entity_ids)
        ),
        apply_entities_hints(select(States.context_id_bin))
        .filter(
            (States.last_updated_ts > start_day) & (States.last_updated_ts < end_day)
        )
        .where(States.metadata_id.in_(states_metadata_ids)),
    ).subquery()
    return select(union.c.context_id_bin).group_by(union.c.context_id_bin)
Generate a CTE to find the entity and device context ids and a query to find linked rows.
def _apply_entities_context_union(
    sel: Select,
    start_day: float,
    end_day: float,
    event_type_ids: tuple[int, ...],
    states_metadata_ids: Collection[int],
    json_quoted_entity_ids: list[str],
) -> CompoundSelect:
    """Generate a CTE to find the entity and device context ids and a query to find linked rows."""
    entities_cte: CTE = _select_entities_context_ids_sub_query(
        start_day,
        end_day,
        event_type_ids,
        states_metadata_ids,
        json_quoted_entity_ids,
    ).cte()
    # We used to optimize this to exclude rows already in the union with
    # a StatesMeta.metadata_ids.not_in(states_metadata_ids), but that made
    # the query much slower on MySQL, and since we already filter them away
    # in the Python code anyway (they will have context_only set on them)
    # the impact is minimal.
    return sel.union_all(
        states_select_for_entity_ids(start_day, end_day, states_metadata_ids),
        apply_events_context_hints(
            select_events_context_only()
            .select_from(entities_cte)
            .outerjoin(Events, entities_cte.c.context_id_bin == Events.context_id_bin)
            .outerjoin(EventTypes, (Events.event_type_id == EventTypes.event_type_id))
            .outerjoin(EventData, (Events.data_id == EventData.data_id))
        ),
        apply_states_context_hints(
            select_states_context_only()
            .select_from(entities_cte)
            .outerjoin(States, entities_cte.c.context_id_bin == States.context_id_bin)
            .outerjoin(StatesMeta, (States.metadata_id == StatesMeta.metadata_id))
        ),
    )
Generate a logbook query for multiple entities.
def entities_stmt(
    start_day: float,
    end_day: float,
    event_type_ids: tuple[int, ...],
    states_metadata_ids: Collection[int],
    json_quoted_entity_ids: list[str],
) -> StatementLambdaElement:
    """Generate a logbook query for multiple entities."""
    return lambda_stmt(
        lambda: _apply_entities_context_union(
            select_events_without_states(start_day, end_day, event_type_ids).where(
                apply_event_entity_id_matchers(json_quoted_entity_ids)
            ),
            start_day,
            end_day,
            event_type_ids,
            states_metadata_ids,
            json_quoted_entity_ids,
        ).order_by(Events.time_fired_ts)
    )
Generate a select for states from the States table for specific entities.
def states_select_for_entity_ids(
    start_day: float, end_day: float, states_metadata_ids: Collection[int]
) -> Select:
    """Generate a select for states from the States table for specific entities."""
    return apply_states_filters(
        apply_entities_hints(select_states()), start_day, end_day
    ).where(States.metadata_id.in_(states_metadata_ids))
Create matchers for the entity_id in the event_data.
def apply_event_entity_id_matchers(
    json_quoted_entity_ids: Iterable[str],
) -> ColumnElement[bool]:
    """Create matchers for the entity_id in the event_data."""
    return sqlalchemy.or_(
        ENTITY_ID_IN_EVENT.is_not(None)
        & sqlalchemy.cast(ENTITY_ID_IN_EVENT, sqlalchemy.Text()).in_(
            json_quoted_entity_ids
        ),
        OLD_ENTITY_ID_IN_EVENT.is_not(None)
        & sqlalchemy.cast(OLD_ENTITY_ID_IN_EVENT, sqlalchemy.Text()).in_(
            json_quoted_entity_ids
        ),
    )
Force mysql to use the right index on large selects.
def apply_entities_hints(sel: Select) -> Select:
    """Force mysql to use the right index on large selects."""
    return sel.with_hint(
        States,
        f"FORCE INDEX ({METADATA_ID_LAST_UPDATED_INDEX_TS})",
        dialect_name="mysql",
    ).with_hint(
        States,
        f"FORCE INDEX ({METADATA_ID_LAST_UPDATED_INDEX_TS})",
        dialect_name="mariadb",
    )
Generate a subquery to find context ids for multiple entities and multiple devices.
def _select_entities_device_id_context_ids_sub_query(
    start_day: float,
    end_day: float,
    event_type_ids: tuple[int, ...],
    states_metadata_ids: Collection[int],
    json_quoted_entity_ids: list[str],
    json_quoted_device_ids: list[str],
) -> Select:
    """Generate a subquery to find context ids for multiple entities and multiple devices."""
    union = union_all(
        select_events_context_id_subquery(start_day, end_day, event_type_ids).where(
            _apply_event_entity_id_device_id_matchers(
                json_quoted_entity_ids, json_quoted_device_ids
            )
        ),
        apply_entities_hints(select(States.context_id_bin))
        .filter(
            (States.last_updated_ts > start_day) & (States.last_updated_ts < end_day)
        )
        .where(States.metadata_id.in_(states_metadata_ids)),
    ).subquery()
    return select(union.c.context_id_bin).group_by(union.c.context_id_bin)
Generate a logbook query for multiple entities and devices.
def entities_devices_stmt(
    start_day: float,
    end_day: float,
    event_type_ids: tuple[int, ...],
    states_metadata_ids: Collection[int],
    json_quoted_entity_ids: list[str],
    json_quoted_device_ids: list[str],
) -> StatementLambdaElement:
    """Generate a logbook query for multiple entities and devices."""
    return lambda_stmt(
        lambda: _apply_entities_devices_context_union(
            select_events_without_states(start_day, end_day, event_type_ids).where(
                _apply_event_entity_id_device_id_matchers(
                    json_quoted_entity_ids, json_quoted_device_ids
                )
            ),
            start_day,
            end_day,
            event_type_ids,
            states_metadata_ids,
            json_quoted_entity_ids,
            json_quoted_device_ids,
        ).order_by(Events.time_fired_ts)
    )
Create matchers for the device_id and entity_id in the event_data.
def _apply_event_entity_id_device_id_matchers(
    json_quoted_entity_ids: Iterable[str], json_quoted_device_ids: Iterable[str]
) -> ColumnElement[bool]:
    """Create matchers for the device_id and entity_id in the event_data."""
    return apply_event_entity_id_matchers(
        json_quoted_entity_ids
    ) | apply_event_device_id_matchers(json_quoted_device_ids)
Generate the logbook statement for a logbook request.
def statement_for_request(
    start_day_dt: dt,
    end_day_dt: dt,
    event_type_ids: tuple[int, ...],
    entity_ids: list[str] | None = None,
    states_metadata_ids: Collection[int] | None = None,
    device_ids: list[str] | None = None,
    filters: Filters | None = None,
    context_id: str | None = None,
) -> StatementLambdaElement:
    """Generate the logbook statement for a logbook request."""
    start_day = start_day_dt.timestamp()
    end_day = end_day_dt.timestamp()
    # No entities: logbook sends everything for the timeframe
    # limited by the context_id and the yaml configured filter
    if not entity_ids and not device_ids:
        context_id_bin = ulid_to_bytes_or_none(context_id)
        return all_stmt(
            start_day,
            end_day,
            event_type_ids,
            filters,
            context_id_bin,
        )
    # sqlalchemy caches object quoting, the
    # json quotable ones must be a different
    # object from the non-json ones to prevent
    # sqlalchemy from quoting them incorrectly
    # entities and devices: logbook sends everything for the timeframe
    # for the entities and devices
    if entity_ids and device_ids:
        return entities_devices_stmt(
            start_day,
            end_day,
            event_type_ids,
            states_metadata_ids or [],
            [json_dumps(entity_id) for entity_id in entity_ids],
            [json_dumps(device_id) for device_id in device_ids],
        )
    # entities: logbook sends everything for the timeframe for the entities
    if entity_ids:
        return entities_stmt(
            start_day,
            end_day,
            event_type_ids,
            states_metadata_ids or [],
            [json_dumps(entity_id) for entity_id in entity_ids],
        )
    # devices: logbook sends everything for the timeframe for the devices
    assert device_ids is not None
    return devices_stmt(
        start_day,
        end_day,
        event_type_ids,
        [json_dumps(device_id) for device_id in device_ids],
    )
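The comment about JSON quoting is easier to see with a concrete value; a stdlib illustration (assuming the json_dumps used here serializes plain strings the same way json.dumps does):

import json

# A JSON-quoted entity_id carries literal double quotes, which is what the
# cast-to-Text IN comparisons in the matchers above compare against.
assert json.dumps("light.kitchen") == '"light.kitchen"'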
Set up the Logentries component.
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the Logentries component."""
    conf = config[DOMAIN]
    token = conf.get(CONF_TOKEN)
    le_wh = f"{DEFAULT_HOST}{token}"

    def logentries_event_listener(event):
        """Listen for new messages on the bus and send them to Logentries."""
        if (state := event.data.get("new_state")) is None:
            return
        try:
            _state = state_helper.state_as_number(state)
        except ValueError:
            _state = state.state
        json_body = [
            {
                "domain": state.domain,
                "entity_id": state.object_id,
                "attributes": dict(state.attributes),
                "time": str(event.time_fired),
                "value": _state,
            }
        ]
        try:
            payload = {"host": le_wh, "event": json_body}
            requests.post(le_wh, data=json.dumps(payload), timeout=10)
        except requests.exceptions.RequestException:
            _LOGGER.exception("Error sending to Logentries")

    hass.bus.listen(EVENT_STATE_CHANGED, logentries_event_listener)
    return True
Return the domain config.
def async_get_domain_config(hass: HomeAssistant) -> LoggerDomainConfig:
    """Return the domain config."""
    return cast(LoggerDomainConfig, hass.data[DOMAIN])
Set the default log level for components.
def set_default_log_level(hass: HomeAssistant, level: int) -> None:
    """Set the default log level for components."""
    _set_log_level(logging.getLogger(""), level)
    hass.bus.async_fire(EVENT_LOGGING_CHANGED)
Set the specified log levels.
def set_log_levels(hass: HomeAssistant, logpoints: Mapping[str, int]) -> None:
    """Set the specified log levels."""
    async_get_domain_config(hass).overrides.update(logpoints)
    for key, value in logpoints.items():
        _set_log_level(logging.getLogger(key), value)
    hass.bus.async_fire(EVENT_LOGGING_CHANGED)
Set the log level. Any logger fetched before this integration is loaded will use the old class.
def _set_log_level(logger: logging.Logger, level: int) -> None:
    """Set the log level.

    Any logger fetched before this integration is loaded will use the old class.
    """
    getattr(logger, "orig_setLevel", logger.setLevel)(level)
Return the chattiest log level.
def _chattiest_log_level(level1: int, level2: int) -> int:
    """Return the chattiest log level."""
    if level1 == logging.NOTSET:
        return level2
    if level2 == logging.NOTSET:
        return level1
    return min(level1, level2)
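A quick check of the NOTSET handling, using the stdlib level constants (NOTSET is 0, and lower numeric levels are chattier):

import logging

# min() picks the more verbose of two configured levels...
assert _chattiest_log_level(logging.DEBUG, logging.WARNING) == logging.DEBUG
# ...but NOTSET defers to the other side rather than always winning min().
assert _chattiest_log_level(logging.NOTSET, logging.ERROR) == logging.ERROR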
Set up the websocket API.
def async_load_websocket_api(hass: HomeAssistant) -> None:
    """Set up the websocket API."""
    websocket_api.async_register_command(hass, handle_integration_log_info)
    websocket_api.async_register_command(hass, handle_integration_log_level)
    websocket_api.async_register_command(hass, handle_module_log_level)
Handle integrations logger info.
def handle_integration_log_info(
    hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
) -> None:
    """Handle integrations logger info."""
    connection.send_result(
        msg["id"],
        [
            {
                "domain": integration,
                "level": get_logger(
                    f"homeassistant.components.{integration}"
                ).getEffectiveLevel(),
            }
            for integration in async_get_loaded_integrations(hass)
        ],
    )
Add a Filter to the logger based on a regexp of the filter_str.
def _add_log_filter(logger: logging.Logger, patterns: list[re.Pattern]) -> None:
    """Add a Filter to the logger based on a regexp of the filter_str."""

    def filter_func(logrecord: logging.LogRecord) -> bool:
        return not any(p.search(logrecord.getMessage()) for p in patterns)

    logger.addFilter(filter_func)
Create a logger subclass. logging.setLoggerClass checks if it is a subclass of Logger and so we cannot use partial to inject hass_overrides.
def _get_logger_class(hass_overrides: dict[str, int]) -> type[logging.Logger]:
    """Create a logger subclass.

    logging.setLoggerClass checks if it is a subclass of Logger and
    so we cannot use partial to inject hass_overrides.
    """

    class HassLogger(logging.Logger):
        """Home Assistant aware logger class."""

        def setLevel(self, level: int | str) -> None:
            """Set the log level unless overridden."""
            if self.name in hass_overrides:
                return
            super().setLevel(level)

        def orig_setLevel(self, level: int | str) -> None:
            """Set the log level."""
            super().setLevel(level)

    return HassLogger
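A hedged wiring sketch for the factory above: install the returned class before the affected loggers are created, so later setLevel calls on overridden names become no-ops (the override level itself is applied through orig_setLevel in _set_log_level).

import logging

overrides = {"homeassistant.components.demo": logging.DEBUG}  # illustrative
logging.setLoggerClass(_get_logger_class(overrides))

logger = logging.getLogger("homeassistant.components.demo")
logger.setLevel(logging.WARNING)  # ignored: the name is in overrides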
Register a flow implementation. domain: Domain of the component responsible for the implementation. client_id: Client ID. client_secret: Client secret. api_key: API key issued by Logitech. redirect_uri: Auth callback redirect URI. sensors: Sensor config.
def register_flow_implementation(
    hass, domain, client_id, client_secret, api_key, redirect_uri, sensors
):
    """Register a flow implementation.

    domain: Domain of the component responsible for the implementation.
    client_id: Client ID.
    client_secret: Client secret.
    api_key: API key issued by Logitech.
    redirect_uri: Auth callback redirect URI.
    sensors: Sensor config.
    """
    if DATA_FLOW_IMPL not in hass.data:
        hass.data[DATA_FLOW_IMPL] = OrderedDict()
    hass.data[DATA_FLOW_IMPL][domain] = {
        CONF_CLIENT_ID: client_id,
        CONF_CLIENT_SECRET: client_secret,
        CONF_API_KEY: api_key,
        CONF_REDIRECT_URI: redirect_uri,
        CONF_SENSORS: sensors,
        EXTERNAL_ERRORS: None,
    }
Set up the London Air sensor.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the London Air sensor."""
    data = APIData()
    data.update()
    add_entities((AirSensor(name, data) for name in config[CONF_LOCATIONS]), True)
Iterate over list of species at each site.
def parse_species(species_data):
    """Iterate over list of species at each site."""
    parsed_species_data = []
    quality_list = []
    for species in species_data:
        if species["@AirQualityBand"] != "No data":
            species_dict = {}
            species_dict["description"] = species["@SpeciesDescription"]
            species_dict["code"] = species["@SpeciesCode"]
            species_dict["quality"] = species["@AirQualityBand"]
            species_dict["index"] = species["@AirQualityIndex"]
            species_dict["summary"] = (
                f"{species_dict['code']} is {species_dict['quality']}"
            )
            parsed_species_data.append(species_dict)
            quality_list.append(species_dict["quality"])
    return parsed_species_data, quality_list
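A worked example using the "@"-prefixed keys the parser reads (sample values are illustrative, not real API data); entries whose band is "No data" are skipped entirely:

sample = [
    {
        "@SpeciesDescription": "Nitrogen Dioxide",
        "@SpeciesCode": "NO2",
        "@AirQualityBand": "Low",
        "@AirQualityIndex": "2",
    },
    {"@SpeciesCode": "PM10", "@AirQualityBand": "No data"},
]
parsed, qualities = parse_species(sample)
# parsed -> one dict whose summary reads "NO2 is Low"; qualities -> ["Low"]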
Iterate over all sites at an authority.
def parse_site(entry_sites_data):
    """Iterate over all sites at an authority."""
    authority_data = []
    for site in entry_sites_data:
        site_data = {}
        species_data = []
        site_data["updated"] = site["@BulletinDate"]
        site_data["latitude"] = site["@Latitude"]
        site_data["longitude"] = site["@Longitude"]
        site_data["site_code"] = site["@SiteCode"]
        site_data["site_name"] = site["@SiteName"].split("-")[-1].lstrip()
        site_data["site_type"] = site["@SiteType"]
        if isinstance(site["Species"], dict):
            species_data = [site["Species"]]
        else:
            species_data = site["Species"]
        parsed_species_data, quality_list = parse_species(species_data)
        if not parsed_species_data:
            parsed_species_data.append("no_species_data")
        site_data["pollutants"] = parsed_species_data
        if quality_list:
            site_data["pollutants_status"] = max(
                set(quality_list), key=quality_list.count
            )
            site_data["number_of_pollutants"] = len(quality_list)
        else:
            site_data["pollutants_status"] = "no_species_data"
            site_data["number_of_pollutants"] = 0
        authority_data.append(site_data)
    return authority_data
Parse the returned dict or list of data from the API.
def parse_api_response(response):
    """Parse the returned dict or list of data from the API."""
    data = dict.fromkeys(AUTHORITIES)
    for authority in AUTHORITIES:
        for entry in response["HourlyAirQualityIndex"]["LocalAuthority"]:
            if entry["@LocalAuthorityName"] == authority:
                entry_sites_data = []
                if "Site" in entry:
                    if isinstance(entry["Site"], dict):
                        entry_sites_data = [entry["Site"]]
                    else:
                        entry_sites_data = entry["Site"]
                data[authority] = parse_site(entry_sites_data)
    return data
Convert a lookin device into DeviceInfo.
def _lookin_device_to_device_info(lookin_device: Device, host: str) -> DeviceInfo:
    """Convert a lookin device into DeviceInfo."""
    return DeviceInfo(
        identifiers={(DOMAIN, lookin_device.id)},
        name=lookin_device.name,
        manufacturer="LOOKin",
        model=MODEL_NAMES[lookin_device.model],
        sw_version=lookin_device.firmware,
        configuration_url=f"http://{host}/device",
    )
Create a function to capture the cell variable.
def _async_climate_updater(
    lookin_protocol: LookInHttpProtocol,
    uuid: str,
) -> Callable[[], Coroutine[None, Any, Climate]]:
    """Create a function to capture the cell variable."""

    async def _async_update() -> Climate:
        return await lookin_protocol.get_conditioner(uuid)

    return _async_update
Create a function to capture the cell variable.
def _async_remote_updater(
    lookin_protocol: LookInHttpProtocol,
    uuid: str,
) -> Callable[[], Coroutine[None, Any, Remote]]:
    """Create a function to capture the cell variable."""

    async def _async_update() -> Remote:
        return await lookin_protocol.get_remote(uuid)

    return _async_update
Convert dashboard info to browse item.
def _item_from_info(info: dict) -> BrowseMedia:
    """Convert dashboard info to browse item."""
    return BrowseMedia(
        title=info["title"],
        media_class=MediaClass.APP,
        media_content_id=info["url_path"],
        media_content_type=DOMAIN,
        thumbnail="https://brands.home-assistant.io/_/lovelace/logo.png",
        can_play=True,
        can_expand=len(info["views"]) > 1,
    )
Validate value is a valid url slug.
def url_slug(value: Any) -> str:
    """Validate value is a valid url slug."""
    if value is None:
        raise vol.Invalid("Slug should not be None")
    if "-" not in value:
        raise vol.Invalid("Url path needs to contain a hyphen (-)")
    str_value = str(value)
    slg = slugify(str_value, separator="-")
    if str_value == slg:
        return str_value
    raise vol.Invalid(f"invalid slug {value} (try {slg})")
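Illustrative calls (behavior follows directly from the validator above): a value that already matches its own slugified form passes, and anything else raises vol.Invalid.

import voluptuous as vol

assert url_slug("lovelace-map") == "lovelace-map"  # already a valid slug
try:
    url_slug("Lovelace Map")  # no hyphen, so it is rejected up front
except vol.Invalid:
    pass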
Generate info about the config.
def _config_info(mode, config):
    """Generate info about the config."""
    return {
        "mode": mode,
        "views": len(config.get("views", [])),
    }
Register system health callbacks.
def async_register(
    hass: HomeAssistant, register: system_health.SystemHealthRegistration
) -> None:
    """Register system health callbacks."""
    register.async_register_info(system_health_info, "/config/lovelace")
Handle errors with WebSocket calls.
def _handle_errors(func):
    """Handle errors with WebSocket calls."""

    @wraps(func)
    async def send_with_error_handling(
        hass: HomeAssistant,
        connection: websocket_api.ActiveConnection,
        msg: dict[str, Any],
    ) -> None:
        url_path = msg.get(CONF_URL_PATH)
        config: LovelaceStorage | None = hass.data[DOMAIN]["dashboards"].get(url_path)
        if config is None:
            connection.send_error(
                msg["id"], "config_not_found", f"Unknown config specified: {url_path}"
            )
            return
        error = None
        try:
            result = await func(hass, connection, msg, config)
        except ConfigNotFound:
            error = "config_not_found", "No config found."
        except HomeAssistantError as err:
            error = "error", str(err)
        if error is not None:
            connection.send_error(msg["id"], *error)
            return
        connection.send_result(msg["id"], result)

    return send_with_error_handling
List the Lovelace UI dashboards.
def websocket_lovelace_dashboards(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """List the Lovelace UI dashboards."""
    connection.send_result(
        msg["id"],
        [
            dashboard.config
            for dashboard in hass.data[DOMAIN]["dashboards"].values()
            if dashboard.config
        ],
    )
Register a panel.
def _register_panel(hass, url_path, mode, config, update):
    """Register a panel."""
    kwargs = {
        "frontend_url_path": url_path,
        "require_admin": config[CONF_REQUIRE_ADMIN],
        "config": {"mode": mode},
        "update": update,
    }
    if config[CONF_SHOW_IN_SIDEBAR]:
        kwargs["sidebar_title"] = config[CONF_TITLE]
        kwargs["sidebar_icon"] = config.get(CONF_ICON, DEFAULT_ICON)

    frontend.async_register_built_in_panel(hass, DOMAIN, **kwargs)
Validate the configuration and return a Luci scanner.
def get_scanner(hass: HomeAssistant, config: ConfigType) -> LuciDeviceScanner | None:
    """Validate the configuration and return a Luci scanner."""
    scanner = LuciDeviceScanner(config[DOMAIN])
    return scanner if scanner.success_init else None
Convert the given Home Assistant light level (0-255) to Lutron (0.0-100.0).
def to_lutron_level(level):
    """Convert the given Home Assistant light level (0-255) to Lutron (0.0-100.0)."""
    return float((level * 100) / 255)
Convert the given Lutron (0.0-100.0) light level to Home Assistant (0-255).
def to_hass_level(level):
    """Convert the given Lutron (0.0-100.0) light level to Home Assistant (0-255)."""
    return int((level * 255) / 100)
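Endpoint arithmetic for this float (0.0-100.0) variant, derivable from the two conversions above; note that int() truncates, so a mid-range round trip can drop a step:

assert to_lutron_level(255) == 100.0 and to_lutron_level(0) == 0.0
assert to_hass_level(100.0) == 255 and to_hass_level(0.0) == 0
assert to_hass_level(50.0) == 127  # 50 * 255 / 100 = 127.5, truncated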
If uuid becomes available, update to use it.
def _async_check_entity_unique_id(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
    platform: str,
    uuid: str,
    legacy_uuid: str,
    controller_guid: str,
) -> None:
    """If uuid becomes available, update to use it."""
    if not uuid:
        return

    unique_id = f"{controller_guid}_{legacy_uuid}"
    entity_id = entity_registry.async_get_entity_id(
        domain=platform, platform=DOMAIN, unique_id=unique_id
    )

    if entity_id:
        new_unique_id = f"{controller_guid}_{uuid}"
        _LOGGER.debug("Updating entity id from %s to %s", unique_id, new_unique_id)
        entity_registry.async_update_entity(entity_id, new_unique_id=new_unique_id)
If uuid becomes available, update to use it.
def _async_check_device_identifiers(
    hass: HomeAssistant,
    device_registry: dr.DeviceRegistry,
    uuid: str,
    legacy_uuid: str,
    controller_guid: str,
) -> None:
    """If uuid becomes available, update to use it."""
    if not uuid:
        return

    unique_id = f"{controller_guid}_{legacy_uuid}"
    device = device_registry.async_get_device(identifiers={(DOMAIN, unique_id)})
    if device:
        new_unique_id = f"{controller_guid}_{uuid}"
        _LOGGER.debug("Updating device id from %s to %s", unique_id, new_unique_id)
        device_registry.async_update_device(
            device.id, new_identifiers={(DOMAIN, new_unique_id)}
        )
Reverse a dictionary.
def _reverse_dict(forward_dict: dict) -> dict:
    """Reverse a dictionary."""
    return {v: k for k, v in forward_dict.items()}
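For example, inverting a (hypothetical) button-name-to-LEAP-number map:

assert _reverse_dict({"on": 2, "off": 4}) == {2: "on", 4: "off"}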
Get the Lutron integration data for the given device registry device id.
def get_lutron_data_by_dr_id(hass: HomeAssistant, device_id: str):
    """Get the Lutron integration data for the given device registry device id."""
    if DOMAIN not in hass.data:
        return None

    for entry_id in hass.data[DOMAIN]:
        data: LutronCasetaData = hass.data[DOMAIN][entry_id]
        if data.keypad_data.dr_device_id_to_keypad.get(device_id):
            return data
    return None
Convert the given Home Assistant light level (0-255) to Lutron (0-100).
def to_lutron_level(level):
    """Convert the given Home Assistant light level (0-255) to Lutron (0-100)."""
    return int(round((level * 100) / 255))
Convert the given Lutron (0-100) light level to Home Assistant (0-255).
def to_hass_level(level):
    """Convert the given Lutron (0-100) light level to Home Assistant (0-255)."""
    return int((level * 255) // 100)
Describe logbook events.
def async_describe_events(
    hass: HomeAssistant,
    async_describe_event: Callable[[str, str, Callable[[Event], dict[str, str]]], None],
) -> None:
    """Describe logbook events."""

    @callback
    def async_describe_button_event(event: Event) -> dict[str, str]:
        """Describe lutron_caseta_button_event logbook event."""
        data = event.data
        device_type = data[ATTR_TYPE]
        leap_button_number = data[ATTR_LEAP_BUTTON_NUMBER]
        dr_device_id = data[ATTR_DEVICE_ID]
        rev_button_map: dict[int, str] | None = None
        keypad_button_names_to_leap: dict[int, dict[str, int]] = {}
        keypad_id: int = -1
        if lutron_data := get_lutron_data_by_dr_id(hass, dr_device_id):
            keypad_data = lutron_data.keypad_data
            keypad = keypad_data.dr_device_id_to_keypad.get(dr_device_id)
            keypad_id = keypad["lutron_device_id"]
            keypad_button_names_to_leap = keypad_data.button_names_to_leap
        if not (rev_button_map := LEAP_TO_DEVICE_TYPE_SUBTYPE_MAP.get(device_type)):
            if fwd_button_map := keypad_button_names_to_leap.get(keypad_id):
                rev_button_map = _reverse_dict(fwd_button_map)
        if rev_button_map is None:
            return {
                LOGBOOK_ENTRY_NAME: f"{data[ATTR_AREA_NAME]} {data[ATTR_DEVICE_NAME]}",
                LOGBOOK_ENTRY_MESSAGE: (
                    f"{data[ATTR_ACTION]} Error retrieving button description"
                ),
            }

        button_description = rev_button_map.get(leap_button_number)
        return {
            LOGBOOK_ENTRY_NAME: f"{data[ATTR_AREA_NAME]} {data[ATTR_DEVICE_NAME]}",
            LOGBOOK_ENTRY_MESSAGE: f"{data[ATTR_ACTION]} {button_description}",
        }

    async_describe_event(
        DOMAIN, LUTRON_CASETA_BUTTON_EVENT, async_describe_button_event
    )
Convert a lutron serial number to a unique id.
def serial_to_unique_id(serial: int) -> str:
    """Convert a lutron serial number to a unique id."""
    return hex(serial)[2:].zfill(8)
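For example (value chosen for illustration), the serial renders as a zero-padded 8-character hex string:

assert serial_to_unique_id(1234) == "000004d2"  # hex(1234) == "0x4d2"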
Register the bridge device in the device registry.
def _async_register_bridge_device(
    hass: HomeAssistant, config_entry_id: str, bridge_device: dict, bridge: Smartbridge
) -> None:
    """Register the bridge device in the device registry."""
    device_registry = dr.async_get(hass)
    device_args = DeviceInfo(
        name=bridge_device["name"],
        manufacturer=MANUFACTURER,
        identifiers={(DOMAIN, bridge_device["serial"])},
        model=f"{bridge_device['model']} ({bridge_device['type']})",
        via_device=(DOMAIN, bridge_device["serial"]),
        configuration_url="https://device-login.lutron.com",
    )
    area = _area_name_from_id(bridge.areas, bridge_device["area"])
    if area != UNASSIGNED_AREA:
        device_args["suggested_area"] = area

    device_registry.async_get_or_create(**device_args, config_entry_id=config_entry_id)
Register keypad devices (Keypads and Pico Remotes) in the device registry.
def _async_setup_keypads(
    hass: HomeAssistant,
    config_entry_id: str,
    bridge: Smartbridge,
    bridge_device: dict[str, str | int],
) -> LutronKeypadData:
    """Register keypad devices (Keypads and Pico Remotes) in the device registry."""
    device_registry = dr.async_get(hass)

    bridge_devices: dict[str, dict[str, str | int]] = bridge.get_devices()
    bridge_buttons: dict[str, dict[str, str | int]] = bridge.buttons

    dr_device_id_to_keypad: dict[str, LutronKeypad] = {}
    keypads: dict[int, LutronKeypad] = {}
    keypad_buttons: dict[int, LutronButton] = {}
    keypad_button_names_to_leap: dict[int, dict[str, int]] = {}
    leap_to_keypad_button_names: dict[int, dict[int, str]] = {}

    for bridge_button in bridge_buttons.values():
        parent_device = cast(str, bridge_button["parent_device"])
        bridge_keypad = bridge_devices[parent_device]
        keypad_lutron_device_id = cast(int, bridge_keypad["device_id"])
        button_lutron_device_id = cast(int, bridge_button["device_id"])
        leap_button_number = cast(int, bridge_button["button_number"])
        button_led_device_id = None
        if "button_led" in bridge_button:
            button_led_device_id = cast(str, bridge_button["button_led"])

        if not (keypad := keypads.get(keypad_lutron_device_id)):
            # First time seeing this keypad, build keypad data and store in keypads
            keypad = keypads[keypad_lutron_device_id] = _async_build_lutron_keypad(
                bridge, bridge_device, bridge_keypad, keypad_lutron_device_id
            )
            # Register the keypad device
            dr_device = device_registry.async_get_or_create(
                **keypad["device_info"], config_entry_id=config_entry_id
            )
            keypad[LUTRON_KEYPAD_DEVICE_REGISTRY_DEVICE_ID] = dr_device.id
            dr_device_id_to_keypad[dr_device.id] = keypad

        button_name = _get_button_name(keypad, bridge_button)
        keypad_lutron_device_id = keypad[LUTRON_KEYPAD_LUTRON_DEVICE_ID]

        # Add button to parent keypad, and build keypad_buttons and
        # keypad_button_names_to_leap
        keypad_buttons[button_lutron_device_id] = LutronButton(
            lutron_device_id=button_lutron_device_id,
            leap_button_number=leap_button_number,
            button_name=button_name,
            led_device_id=button_led_device_id,
            parent_keypad=keypad_lutron_device_id,
        )
        keypad[LUTRON_KEYPAD_BUTTONS].append(button_lutron_device_id)

        button_name_to_leap = keypad_button_names_to_leap.setdefault(
            keypad_lutron_device_id, {}
        )
        button_name_to_leap[button_name] = leap_button_number

        leap_to_button_name = leap_to_keypad_button_names.setdefault(
            keypad_lutron_device_id, {}
        )
        leap_to_button_name[leap_button_number] = button_name

    keypad_trigger_schemas = _async_build_trigger_schemas(keypad_button_names_to_leap)

    _async_subscribe_keypad_events(
        hass=hass,
        bridge=bridge,
        keypads=keypads,
        keypad_buttons=keypad_buttons,
        leap_to_keypad_button_names=leap_to_keypad_button_names,
    )

    return LutronKeypadData(
        dr_device_id_to_keypad,
        keypads,
        keypad_buttons,
        keypad_button_names_to_leap,
        keypad_trigger_schemas,
    )
Build device trigger schemas.
def _async_build_trigger_schemas(
    keypad_button_names_to_leap: dict[int, dict[str, int]],
) -> dict[int, vol.Schema]:
    """Build device trigger schemas."""
    return {
        keypad_id: LUTRON_BUTTON_TRIGGER_SCHEMA.extend(
            {
                vol.Required(CONF_SUBTYPE): vol.In(
                    keypad_button_names_to_leap[keypad_id]
                ),
            }
        )
        for keypad_id in keypad_button_names_to_leap
    }
Get the LEAP button name and check for override.
def _get_button_name(keypad: LutronKeypad, bridge_button: dict[str, Any]) -> str:
    """Get the LEAP button name and check for override."""
    button_number = bridge_button["button_number"]
    button_name = bridge_button.get("device_name")
    if button_name is None:
        # This is a Caseta Button retrieve name from hardcoded trigger definitions.
        return _get_button_name_from_triggers(keypad, button_number)

    keypad_model = keypad[LUTRON_KEYPAD_MODEL]
    if keypad_model_override := KEYPAD_LEAP_BUTTON_NAME_OVERRIDE.get(keypad_model):
        if alt_button_name := keypad_model_override.get(button_number):
            return alt_button_name

    return button_name
Retrieve the caseta button name from device triggers.
def _get_button_name_from_triggers(keypad: LutronKeypad, button_number: int) -> str:
    """Retrieve the caseta button name from device triggers."""
    button_number_map = LEAP_TO_DEVICE_TYPE_SUBTYPE_MAP.get(keypad["type"], {})
    return (
        button_number_map.get(
            button_number,
            f"button {button_number}",
        )
        .replace("_", " ")
        .title()
    )
Return the full area name including parent(s).
def _area_name_from_id(areas: dict[str, dict], area_id: str | None) -> str:
    """Return the full area name including parent(s)."""
    if area_id is None:
        return UNASSIGNED_AREA
    return _construct_area_name_from_id(areas, area_id, [])
Recursively construct the full area name including parent(s).
def _construct_area_name_from_id(
    areas: dict[str, dict], area_id: str, labels: list[str]
) -> str:
    """Recursively construct the full area name including parent(s)."""
    area = areas[area_id]
    parent_area_id = area["parent_id"]
    if parent_area_id is None:
        # This is the root area, return last area
        return " ".join(labels)

    labels.insert(0, area["name"])
    return _construct_area_name_from_id(areas, parent_area_id, labels)
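An illustrative area tree (shape inferred from the lookups above); the root area's own name is dropped, and only the labels accumulated below it are joined:

areas = {
    "1": {"name": "Home", "parent_id": None},  # root: name not included
    "2": {"name": "Upstairs", "parent_id": "1"},
    "3": {"name": "Bedroom", "parent_id": "2"},
}
assert _construct_area_name_from_id(areas, "3", []) == "Upstairs Bedroom"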
Get the LIP button for a given LEAP button.
def async_get_lip_button(device_type: str, leap_button: int) -> int | None:
    """Get the LIP button for a given LEAP button."""
    if (
        lip_buttons_name_to_num := DEVICE_TYPE_SUBTYPE_MAP_TO_LIP.get(device_type)
    ) is None or (
        leap_button_num_to_name := LEAP_TO_DEVICE_TYPE_SUBTYPE_MAP.get(device_type)
    ) is None:
        return None
    return lip_buttons_name_to_num[leap_button_num_to_name[leap_button]]
Subscribe to lutron events.
def _async_subscribe_keypad_events(
    hass: HomeAssistant,
    bridge: Smartbridge,
    keypads: dict[int, LutronKeypad],
    keypad_buttons: dict[int, LutronButton],
    leap_to_keypad_button_names: dict[int, dict[int, str]],
):
    """Subscribe to lutron events."""

    @callback
    def _async_button_event(button_id, event_type):
        if not (button := keypad_buttons.get(button_id)) or not (
            keypad := keypads.get(button["parent_keypad"])
        ):
            return

        if event_type == BUTTON_STATUS_PRESSED:
            action = ACTION_PRESS
        else:
            action = ACTION_RELEASE

        keypad_type = keypad[LUTRON_KEYPAD_TYPE]
        keypad_device_id = keypad[LUTRON_KEYPAD_LUTRON_DEVICE_ID]
        leap_button_number = button[LUTRON_BUTTON_LEAP_BUTTON_NUMBER]
        lip_button_number = async_get_lip_button(keypad_type, leap_button_number)
        button_type = LEAP_TO_DEVICE_TYPE_SUBTYPE_MAP.get(
            keypad_type, leap_to_keypad_button_names[keypad_device_id]
        )[leap_button_number]

        hass.bus.async_fire(
            LUTRON_CASETA_BUTTON_EVENT,
            {
                ATTR_SERIAL: keypad[LUTRON_KEYPAD_SERIAL],
                ATTR_TYPE: keypad_type,
                ATTR_BUTTON_NUMBER: lip_button_number,
                ATTR_LEAP_BUTTON_NUMBER: leap_button_number,
                ATTR_DEVICE_NAME: keypad[LUTRON_KEYPAD_NAME],
                ATTR_DEVICE_ID: keypad[LUTRON_KEYPAD_DEVICE_REGISTRY_DEVICE_ID],
                ATTR_AREA_NAME: keypad[LUTRON_KEYPAD_AREA_NAME],
                ATTR_BUTTON_TYPE: button_type,
                ATTR_ACTION: action,
            },
        )

    for button_id in keypad_buttons:
        bridge.add_button_subscriber(
            str(button_id),
            lambda event_type, button_id=button_id: _async_button_event(
                button_id, event_type
            ),
        )
Convert a lutron caseta identifier to a device identifier.
def _id_to_identifier(lutron_id: str) -> tuple[str, str]:
    """Convert a lutron caseta identifier to a device identifier."""
    return (DOMAIN, lutron_id)
Set up LW-12 WiFi LED Controller platform.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up LW-12 WiFi LED Controller platform."""
    # Assign configuration variables.
    name = config.get(CONF_NAME)
    host = config.get(CONF_HOST)
    port = config.get(CONF_PORT)
    # Add devices
    lw12_light = lw12.LW12Controller(host, port)
    add_entities([LW12WiFi(name, lw12_light)])
Get status of the setpoint.
def get_setpoint_status(status: str, time: str) -> str | None:
    """Get status of the setpoint."""
    if status == PRESET_HOLD_UNTIL:
        return f"Held until {time}"
    return LYRIC_SETPOINT_STATUS_NAMES.get(status)
Get datetime from future time provided.
def get_datetime_from_future_time(time_str: str) -> datetime:
    """Get datetime from future time provided."""
    time = dt_util.parse_time(time_str)
    if time is None:
        raise ValueError(f"Unable to parse time {time_str}")
    now = dt_util.utcnow()
    if time <= now.time():
        now = now + timedelta(days=1)
    return dt_util.as_utc(datetime.combine(now.date(), time))
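Illustrative behavior (the times are made up): if it is currently 22:00, "21:30" resolves to 21:30 tomorrow because that time has already passed today, while "23:15" resolves to 23:15 today; either way the result lies in the future.

when = get_datetime_from_future_time("21:30")
assert when > dt_util.utcnow()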
Get the Mailgun notification service.
def get_service(
    hass: HomeAssistant,
    config: ConfigType,
    discovery_info: DiscoveryInfoType | None = None,
) -> MailgunNotificationService | None:
    """Get the Mailgun notification service."""
    data = hass.data[MAILGUN_DOMAIN]
    mailgun_service = MailgunNotificationService(
        data.get(CONF_DOMAIN),
        data.get(CONF_SANDBOX),
        data.get(CONF_API_KEY),
        config.get(CONF_SENDER),
        config.get(CONF_RECIPIENT),
    )
    if mailgun_service.connection_is_valid():
        return mailgun_service
    return None
Validate the state.
def _state_validator(config):
    """Validate the state."""
    for state in SUPPORTED_PRETRIGGER_STATES:
        if CONF_DELAY_TIME not in config[state]:
            config[state] = config[state] | {CONF_DELAY_TIME: config[CONF_DELAY_TIME]}
        if CONF_TRIGGER_TIME not in config[state]:
            config[state] = config[state] | {
                CONF_TRIGGER_TIME: config[CONF_TRIGGER_TIME]
            }
    for state in SUPPORTED_ARMING_STATES:
        if CONF_ARMING_TIME not in config[state]:
            config[state] = config[state] | {CONF_ARMING_TIME: config[CONF_ARMING_TIME]}

    return config
Validate the state.
def _state_schema(state):
    """Validate the state."""
    schema = {}
    if state in SUPPORTED_PRETRIGGER_STATES:
        schema[vol.Optional(CONF_DELAY_TIME)] = vol.All(
            cv.time_period, cv.positive_timedelta
        )
        schema[vol.Optional(CONF_TRIGGER_TIME)] = vol.All(
            cv.time_period, cv.positive_timedelta
        )
    if state in SUPPORTED_ARMING_STATES:
        schema[vol.Optional(CONF_ARMING_TIME)] = vol.All(
            cv.time_period, cv.positive_timedelta
        )
    return vol.Schema(schema)
Set up the manual alarm platform.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the manual alarm platform."""
    add_entities(
        [
            ManualAlarm(
                hass,
                config[CONF_NAME],
                config.get(CONF_CODE),
                config.get(CONF_CODE_TEMPLATE),
                config.get(CONF_CODE_ARM_REQUIRED),
                config.get(CONF_DISARM_AFTER_TRIGGER, DEFAULT_DISARM_AFTER_TRIGGER),
                config,
            )
        ]
    )
Validate the state.
def _state_validator(config):
    """Validate the state."""
    for state in SUPPORTED_PRETRIGGER_STATES:
        if CONF_DELAY_TIME not in config[state]:
            config[state] = config[state] | {CONF_DELAY_TIME: config[CONF_DELAY_TIME]}
        if CONF_TRIGGER_TIME not in config[state]:
            config[state] = config[state] | {
                CONF_TRIGGER_TIME: config[CONF_TRIGGER_TIME]
            }
    for state in SUPPORTED_PENDING_STATES:
        if CONF_PENDING_TIME not in config[state]:
            config[state] = config[state] | {
                CONF_PENDING_TIME: config[CONF_PENDING_TIME]
            }

    return config
Validate the state.
def _state_schema(state):
    """Validate the state."""
    schema = {}
    if state in SUPPORTED_PRETRIGGER_STATES:
        schema[vol.Optional(CONF_DELAY_TIME)] = vol.All(
            cv.time_period, cv.positive_timedelta
        )
        schema[vol.Optional(CONF_TRIGGER_TIME)] = vol.All(
            cv.time_period, cv.positive_timedelta
        )
    if state in SUPPORTED_PENDING_STATES:
        schema[vol.Optional(CONF_PENDING_TIME)] = vol.All(
            cv.time_period, cv.positive_timedelta
        )
    return vol.Schema(schema)
Set up MaryTTS speech component.
def get_engine(hass, config, discovery_info=None):
    """Set up MaryTTS speech component."""
    return MaryTTSProvider(hass, config)
Get the Mastodon notification service.
def get_service(
    hass: HomeAssistant,
    config: ConfigType,
    discovery_info: DiscoveryInfoType | None = None,
) -> MastodonNotificationService | None:
    """Get the Mastodon notification service."""
    client_id = config.get(CONF_CLIENT_ID)
    client_secret = config.get(CONF_CLIENT_SECRET)
    access_token = config.get(CONF_ACCESS_TOKEN)
    base_url = config.get(CONF_BASE_URL)

    try:
        mastodon = Mastodon(
            client_id=client_id,
            client_secret=client_secret,
            access_token=access_token,
            api_base_url=base_url,
        )
        mastodon.account_verify_credentials()
    except MastodonUnauthorizedError:
        LOGGER.warning("Authentication failed")
        return None

    return MastodonNotificationService(mastodon)
Get the Matrix notification service.
def get_service(
    hass: HomeAssistant,
    config: ConfigType,
    discovery_info: DiscoveryInfoType | None = None,
) -> MatrixNotificationService:
    """Get the Matrix notification service."""
    return MatrixNotificationService(config[CONF_DEFAULT_ROOM])
Strip spaces and null char from the name.
def get_clean_name(name: str | None) -> str | None:
    """Strip spaces and null char from the name."""
    if name is None:
        return name
    name = name.replace("\x00", "")
    return name.strip() or None
Get the add-on manager.
def get_addon_manager(hass: HomeAssistant) -> AddonManager:
    """Get the add-on manager."""
    return AddonManager(hass, LOGGER, "Matter Server", ADDON_SLUG)
Register all of our api endpoints.
def async_register_api(hass: HomeAssistant) -> None:
    """Register all of our api endpoints."""
    websocket_api.async_register_command(hass, websocket_commission)
    websocket_api.async_register_command(hass, websocket_commission_on_network)
    websocket_api.async_register_command(hass, websocket_set_thread_dataset)
    websocket_api.async_register_command(hass, websocket_set_wifi_credentials)
    websocket_api.async_register_command(hass, websocket_node_diagnostics)
    websocket_api.async_register_command(hass, websocket_ping_node)
    websocket_api.async_register_command(hass, websocket_open_commissioning_window)
    websocket_api.async_register_command(hass, websocket_remove_matter_fabric)
    websocket_api.async_register_command(hass, websocket_interview_node)
Decorate async function to get node.
def async_get_node(
    func: Callable[
        [HomeAssistant, ActiveConnection, dict[str, Any], MatterAdapter, MatterNode],
        Coroutine[Any, Any, None],
    ],
) -> Callable[
    [HomeAssistant, ActiveConnection, dict[str, Any], MatterAdapter],
    Coroutine[Any, Any, None],
]:
    """Decorate async function to get node."""

    @wraps(func)
    async def async_get_node_func(
        hass: HomeAssistant,
        connection: ActiveConnection,
        msg: dict[str, Any],
        matter: MatterAdapter,
    ) -> None:
        """Provide user specific data and store to function."""
        node = node_from_ha_device_id(hass, msg[DEVICE_ID])
        if not node:
            raise MissingNode(
                f"Could not resolve Matter node from device id {msg[DEVICE_ID]}"
            )
        await func(hass, connection, msg, matter, node)

    return async_get_node_func
Decorate function to get the MatterAdapter.
def async_get_matter_adapter(
    func: Callable[
        [HomeAssistant, ActiveConnection, dict[str, Any], MatterAdapter],
        Coroutine[Any, Any, None],
    ],
) -> Callable[
    [HomeAssistant, ActiveConnection, dict[str, Any]], Coroutine[Any, Any, None]
]:
    """Decorate function to get the MatterAdapter."""

    @wraps(func)
    async def _get_matter(
        hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
    ) -> None:
        """Provide the Matter client to the function."""
        matter = get_matter(hass)
        await func(hass, connection, msg, matter)

    return _get_matter
Decorate function to handle MatterError and send relevant error.
def async_handle_failed_command(
    func: Callable[
        Concatenate[HomeAssistant, ActiveConnection, dict[str, Any], _P],
        Coroutine[Any, Any, None],
    ],
) -> Callable[
    Concatenate[HomeAssistant, ActiveConnection, dict[str, Any], _P],
    Coroutine[Any, Any, None],
]:
    """Decorate function to handle MatterError and send relevant error."""

    @wraps(func)
    async def async_handle_failed_command_func(
        hass: HomeAssistant,
        connection: ActiveConnection,
        msg: dict[str, Any],
        *args: _P.args,
        **kwargs: _P.kwargs,
    ) -> None:
        """Handle MatterError within function and send relevant error."""
        try:
            await func(hass, connection, msg, *args, **kwargs)
        except MatterError as err:
            connection.send_error(msg[ID], str(err.error_code), err.args[0])
        except MissingNode as err:
            connection.send_error(msg[ID], ERROR_NODE_NOT_FOUND, err.args[0])

    return async_handle_failed_command_func
Return a schema for the manual step.
def get_manual_schema(user_input: dict[str, Any]) -> vol.Schema:
    """Return a schema for the manual step."""
    default_url = user_input.get(CONF_URL, DEFAULT_URL)
    return vol.Schema({vol.Required(CONF_URL, default=default_url): str})
Return the websocket address.
def build_ws_address(host: str, port: int) -> str:
    """Return the websocket address."""
    return f"ws://{host}:{port}/ws"
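For example (host and port are illustrative, not defaults confirmed by the source):

assert build_ws_address("127.0.0.1", 5580) == "ws://127.0.0.1:5580/ws"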
Redact Matter cluster attribute.
def redact_matter_attributes(node_data: dict[str, Any]) -> dict[str, Any]:
    """Redact Matter cluster attribute."""
    redacted = deepcopy(node_data)
    for attribute_to_redact in ATTRIBUTES_TO_REDACT:
        for attribute_path in redacted["attributes"]:
            _, cluster_id, attribute_id = parse_attribute_path(attribute_path)
            if cluster_id != attribute_to_redact.cluster_id:
                continue
            if attribute_id != attribute_to_redact.attribute_id:
                continue
            redacted["attributes"][attribute_path] = REDACTED
    return redacted
Remove serialization type from data.
def remove_serialization_type(data: dict[str, Any]) -> dict[str, Any]:
    """Remove serialization type from data."""
    if "_type" in data:
        data.pop("_type")
    return data
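The removal mutates the dict in place and returns the same object; the "0/6/0" attribute path below is illustrative:

data = {"_type": "chip.clusters.Objects.OnOff", "0/6/0": True}
assert remove_serialization_type(data) == {"0/6/0": True}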