Iterate over all available discovery schemas.
def iter_schemas() -> Generator[MatterDiscoverySchema, None, None]:
    """Iterate over all available discovery schemas."""
    for platform_schemas in DISCOVERY_SCHEMAS.values():
        yield from platform_schemas
Run discovery on MatterEndpoint and return matching MatterEntityInfo(s).
def async_discover_entities(
    endpoint: MatterEndpoint,
) -> Generator[MatterEntityInfo, None, None]:
    """Run discovery on MatterEndpoint and return matching MatterEntityInfo(s)."""
    discovered_attributes: set[type[ClusterAttributeDescriptor]] = set()
    device_info = endpoint.device_info
    for schema in iter_schemas():
        # abort if attribute(s) already discovered
        if any(x in schema.required_attributes for x in discovered_attributes):
            continue

        # check vendor_id
        if (
            schema.vendor_id is not None
            and device_info.vendorID not in schema.vendor_id
        ):
            continue

        # check product_name
        if (
            schema.product_name is not None
            and device_info.productName not in schema.product_name
        ):
            continue

        # check required device_type
        if schema.device_type is not None and not any(
            x in schema.device_type for x in endpoint.device_types
        ):
            continue

        # check absent device_type
        if schema.not_device_type is not None and any(
            x in schema.not_device_type for x in endpoint.device_types
        ):
            continue

        # check endpoint_id
        if (
            schema.endpoint_id is not None
            and endpoint.endpoint_id not in schema.endpoint_id
        ):
            continue

        # check required attributes
        if schema.required_attributes is not None and not all(
            endpoint.has_attribute(None, val_schema)
            for val_schema in schema.required_attributes
        ):
            continue

        # check for values that may not be present
        if schema.absent_attributes is not None and any(
            endpoint.has_attribute(None, val_schema)
            for val_schema in schema.absent_attributes
        ):
            continue

        # all checks passed, this value belongs to an entity
        attributes_to_watch = list(schema.required_attributes)
        if schema.optional_attributes:
            # check optional attributes
            for optional_attribute in schema.optional_attributes:
                if optional_attribute in attributes_to_watch:
                    continue
                if endpoint.has_attribute(None, optional_attribute):
                    attributes_to_watch.append(optional_attribute)

        yield MatterEntityInfo(
            endpoint=endpoint,
            platform=schema.platform,
            attributes_to_watch=attributes_to_watch,
            entity_description=schema.entity_description,
            entity_class=schema.entity_class,
            should_poll=schema.should_poll,
        )

        # prevent re-discovery of the primary attribute if not allowed
        if not schema.allow_multi:
            discovered_attributes.update(schema.required_attributes)
Return MatterAdapter instance.
def get_matter(hass: HomeAssistant) -> MatterAdapter:
    """Return MatterAdapter instance."""
    # NOTE: This assumes only one Matter connection/fabric can exist.
    # Shall we support connecting to multiple servers in the client or by
    # config entries? In case of the config entry we need to fix this.
    matter_entry_data: MatterEntryData = next(iter(hass.data[DOMAIN].values()))
    return matter_entry_data.adapter
Return `Operational Instance Name` for given MatterNode.
def get_operational_instance_id(
    server_info: ServerInfoMessage,
    node: MatterNode,
) -> str:
    """Return `Operational Instance Name` for given MatterNode."""
    fabric_id_hex = f"{server_info.compressed_fabric_id:016X}"
    node_id_hex = f"{node.node_id:016X}"
    # Operational instance id matches the mDNS advertisement for the node
    # this is the recommended ID to recognize a unique matter node (within a fabric).
    return f"{fabric_id_hex}-{node_id_hex}"
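For illustration, the zero-padded hex formatting used above (IDs invented):

# A node_id of 5 on a fabric with compressed id 0x2906C908D115D362 would yield:
f"{0x2906C908D115D362:016X}-{5:016X}"
# -> "2906C908D115D362-0000000000000005"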
Return HA device_id for the given MatterEndpoint.
def get_device_id(
    server_info: ServerInfoMessage,
    endpoint: MatterEndpoint,
) -> str:
    """Return HA device_id for the given MatterEndpoint."""
    operational_instance_id = get_operational_instance_id(server_info, endpoint.node)
    # Append endpoint ID if this endpoint is a bridged or composed device
    if endpoint.is_composed_device:
        compose_parent = endpoint.node.get_compose_parent(endpoint.endpoint_id)
        assert compose_parent is not None
        postfix = str(compose_parent.endpoint_id)
    elif endpoint.is_bridged_device:
        postfix = str(endpoint.endpoint_id)
    else:
        # this should be compatible with previous versions
        postfix = "MatterNodeDevice"
    return f"{operational_instance_id}-{postfix}"
Get node id from ha device id.
def node_from_ha_device_id(hass: HomeAssistant, ha_device_id: str) -> MatterNode | None:
    """Get node id from ha device id."""
    dev_reg = dr.async_get(hass)
    device = dev_reg.async_get(ha_device_id)
    if device is None:
        raise MissingNode(f"Invalid device ID: {ha_device_id}")
    return get_node_from_device_entry(hass, device)
Return MatterNode from device entry.
def get_node_from_device_entry(
    hass: HomeAssistant, device: dr.DeviceEntry
) -> MatterNode | None:
    """Return MatterNode from device entry."""
    matter = get_matter(hass)
    device_id_type_prefix = f"{ID_TYPE_DEVICE_ID}_"
    device_id_full = next(
        (
            identifier[1]
            for identifier in device.identifiers
            if identifier[0] == DOMAIN
            and identifier[1].startswith(device_id_type_prefix)
        ),
        None,
    )

    if device_id_full is None:
        return None

    # Use removeprefix to strip the exact prefix; lstrip(prefix) would strip any
    # leading characters contained in the prefix string and could corrupt the ID.
    device_id = device_id_full.removeprefix(device_id_type_prefix)
    matter_client = matter.matter_client
    server_info = matter_client.server_info

    if server_info is None:
        raise RuntimeError("Matter server information is not available")

    return next(
        (
            node
            for node in matter_client.get_nodes()
            for endpoint in node.endpoints.values()
            if get_device_id(server_info, endpoint) == device_id
        ),
        None,
    )
Change value from from_range to to_range.
def renormalize(
    number: float, from_range: tuple[float, float], to_range: tuple[float, float]
) -> float:
    """Change value from from_range to to_range."""
    delta1 = from_range[1] - from_range[0]
    delta2 = to_range[1] - to_range[0]
    return (delta2 * (number - from_range[0]) / delta1) + to_range[0]
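A minimal usage sketch of the linear rescaling above (sample values are illustrative only):

# Map 50 on the 0-100 scale onto the 0-254 scale used by Matter levels.
scaled = renormalize(50, (0, 100), (0, 254))
assert scaled == 127.0  # 254 * (50 - 0) / 100 + 0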
Convert Home Assistant HS to Matter HS.
def convert_to_matter_hs(hass_hs: tuple[float, float]) -> tuple[float, float]:
    """Convert Home Assistant HS to Matter HS."""
    return (
        hass_hs[0] / 360 * 254,
        renormalize(hass_hs[1], (0, 100), (0, 254)),
    )
Convert Matter HS to Home Assistant HS.
def convert_to_hass_hs(matter_hs: tuple[float, float]) -> tuple[float, float]:
    """Convert Matter HS to Home Assistant HS."""
    return (
        matter_hs[0] * 360 / 254,
        renormalize(matter_hs[1], (0, 254), (0, 100)),
    )
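A round-trip sketch of the hue/saturation scaling: Home Assistant uses 0-360 hue and 0-100 saturation, while Matter uses 0-254 for both.

matter_hs = convert_to_matter_hs((180.0, 50.0))   # -> (127.0, 127.0)
convert_to_hass_hs(matter_hs)                     # -> (180.0, 50.0)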
Convert Home Assistant XY to Matter XY.
def convert_to_matter_xy(hass_xy: tuple[float, float]) -> tuple[float, float]:
    """Convert Home Assistant XY to Matter XY."""
    return (hass_xy[0] * XY_COLOR_FACTOR, hass_xy[1] * XY_COLOR_FACTOR)
Convert Matter XY to Home Assistant XY.
def convert_to_hass_xy(matter_xy: tuple[float, float]) -> tuple[float, float]:
    """Convert Matter XY to Home Assistant XY."""
    return (matter_xy[0] / XY_COLOR_FACTOR, matter_xy[1] / XY_COLOR_FACTOR)
Return Matter device info or None if device does not exist.
def get_matter_device_info(
    hass: HomeAssistant, device_id: str
) -> MatterDeviceInfo | None:
    """Return Matter device info or None if device does not exist."""
    # Test hass.data[DOMAIN] to ensure config entry is set up
    if not hass.data.get(DOMAIN, False) or not (
        node := node_from_ha_device_id(hass, device_id)
    ):
        return None

    return MatterDeviceInfo(
        unique_id=node.device_info.uniqueID,
        vendor_id=hex(node.device_info.vendorID),
        product_id=hex(node.device_info.productID),
    )
Ensure that Matter Server add-on is updated and running. May only be used as part of async_setup_entry above.
def _get_addon_manager(hass: HomeAssistant) -> AddonManager:
    """Ensure that Matter Server add-on is updated and running.

    May only be used as part of async_setup_entry above.
    """
    addon_manager: AddonManager = get_addon_manager(hass)
    if addon_manager.task_in_progress():
        raise ConfigEntryNotReady
    return addon_manager
Iterate through all MAX! Devices and add window shutters.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Iterate through all MAX! Devices and add window shutters."""
    devices: list[MaxCubeBinarySensorBase] = []
    for handler in hass.data[DATA_KEY].values():
        for device in handler.cube.devices:
            devices.append(MaxCubeBattery(handler, device))
            # Only add Window Shutters
            if device.is_windowshutter():
                devices.append(MaxCubeShutter(handler, device))

    add_entities(devices)
Iterate through all MAX! Devices and add thermostats.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Iterate through all MAX! Devices and add thermostats."""
    add_entities(
        MaxCubeClimate(handler, device)
        for handler in hass.data[DATA_KEY].values()
        for device in handler.cube.devices
        if device.is_thermostat() or device.is_wallthermostat()
    )
Establish connection to MAX! Cube.
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Establish connection to MAX! Cube."""
    if DATA_KEY not in hass.data:
        hass.data[DATA_KEY] = {}

    connection_failed = 0
    gateways = config[DOMAIN][CONF_GATEWAYS]
    for gateway in gateways:
        host = gateway[CONF_HOST]
        port = gateway[CONF_PORT]
        scan_interval = gateway[CONF_SCAN_INTERVAL].total_seconds()

        try:
            cube = MaxCube(host, port, now=now)
            hass.data[DATA_KEY][host] = MaxCubeHandle(cube, scan_interval)
        except TimeoutError as ex:
            _LOGGER.error("Unable to connect to Max!Cube gateway: %s", str(ex))
            persistent_notification.create(
                hass,
                (
                    f"Error: {ex}<br />You will need to restart Home Assistant after"
                    " fixing."
                ),
                title=NOTIFICATION_TITLE,
                notification_id=NOTIFICATION_ID,
            )
            connection_failed += 1

    if connection_failed >= len(gateways):
        return False

    load_platform(hass, Platform.CLIMATE, DOMAIN, {}, config)
    load_platform(hass, Platform.BINARY_SENSOR, DOMAIN, {}, config)

    return True
Convert elapsed time to timestamp.
def _elapsed_time_to_timestamp(probe: MeaterProbe) -> datetime | None:
    """Convert elapsed time to timestamp."""
    if not probe.cook or not hasattr(probe.cook, "time_elapsed"):
        return None
    return dt_util.utcnow() - timedelta(seconds=probe.cook.time_elapsed)
Convert remaining time to timestamp.
def _remaining_time_to_timestamp(probe: MeaterProbe) -> datetime | None:
    """Convert remaining time to timestamp."""
    if (
        not probe.cook
        or not hasattr(probe.cook, "time_remaining")
        or probe.cook.time_remaining < 0
    ):
        return None
    return dt_util.utcnow() + timedelta(seconds=probe.cook.time_remaining)
Return the best quality stream. As per https://github.com/yt-dlp/yt-dlp/blob/master/yt_dlp/extractor/common.py#L128.
def get_best_stream(formats: list[dict[str, Any]]) -> str:
    """Return the best quality stream.

    As per
    https://github.com/yt-dlp/yt-dlp/blob/master/yt_dlp/extractor/common.py#L128.
    """
    return cast(str, formats[len(formats) - 1]["url"])
YouTube responses also include files with only video or audio. So we filter on files with both audio and video codec.
def get_best_stream_youtube(formats: list[dict[str, Any]]) -> str:
    """YouTube responses also include files with only video or audio.

    So we filter on files with both audio and video codec.
    """
    return get_best_stream(
        [
            stream_format
            for stream_format in formats
            if stream_format.get("acodec", "none") != "none"
            and stream_format.get("vcodec", "none") != "none"
        ]
    )
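A small illustrative call with made-up yt-dlp-style format dicts: audio-only and video-only entries are dropped and the last remaining (best) format wins.

formats = [
    {"url": "https://example.com/audio", "acodec": "opus", "vcodec": "none"},
    {"url": "https://example.com/low", "acodec": "mp4a", "vcodec": "avc1"},
    {"url": "https://example.com/best", "acodec": "opus", "vcodec": "vp9"},
]
assert get_best_stream_youtube(formats) == "https://example.com/best"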
Update a media URL with authentication if it points at Home Assistant.
def async_process_play_media_url( hass: HomeAssistant, media_content_id: str, *, allow_relative_url: bool = False, for_supervisor_network: bool = False, ) -> str: """Update a media URL with authentication if it points at Home Assistant.""" parsed = yarl.URL(media_content_id) if parsed.scheme and parsed.scheme not in ("http", "https"): return media_content_id if parsed.is_absolute(): if not is_hass_url(hass, media_content_id): return media_content_id elif media_content_id[0] != "/": return media_content_id if parsed.query: logging.getLogger(__name__).debug( "Not signing path for content with query param" ) elif parsed.path.startswith(PATHS_WITHOUT_AUTH): # We don't sign this path if it doesn't need auth. Although signing itself can't # hurt, some devices are unable to handle long URLs and the auth signature might # push it over. pass else: signed_path = async_sign_path( hass, quote(parsed.path), timedelta(seconds=CONTENT_AUTH_EXPIRY_TIME), ) media_content_id = str(parsed.join(yarl.URL(signed_path))) # convert relative URL to absolute URL if not parsed.is_absolute() and not allow_relative_url: base_url = None if for_supervisor_network: base_url = get_supervisor_network_url(hass) if not base_url: try: base_url = get_url(hass) except NoURLAvailableError as err: msg = "Unable to determine Home Assistant URL to send to device" if ( hass.config.api and hass.config.api.use_ssl and (not hass.config.external_url or not hass.config.internal_url) ): msg += ". Configure internal and external URL in general settings." raise HomeAssistantError(msg) from err media_content_id = f"{base_url}{media_content_id}" return media_content_id
Create a function to test a device condition.
def async_condition_from_config( hass: HomeAssistant, config: ConfigType ) -> condition.ConditionCheckerType: """Create a function to test a device condition.""" if config[CONF_TYPE] == "is_buffering": state = STATE_BUFFERING elif config[CONF_TYPE] == "is_idle": state = STATE_IDLE elif config[CONF_TYPE] == "is_off": state = STATE_OFF elif config[CONF_TYPE] == "is_on": state = STATE_ON elif config[CONF_TYPE] == "is_paused": state = STATE_PAUSED else: # is_playing state = STATE_PLAYING registry = er.async_get(hass) entity_id = er.async_resolve_entity_id(registry, config[ATTR_ENTITY_ID]) def test_is_state(hass: HomeAssistant, variables: TemplateVarsType) -> bool: """Test if an entity is a certain state.""" return condition.state(hass, entity_id, state) return test_is_state
Describe group on off states.
def async_describe_on_off_states(
    hass: HomeAssistant, registry: "GroupIntegrationRegistry"
) -> None:
    """Describe group on off states."""
    registry.on_off_states(
        DOMAIN,
        {
            STATE_ON,
            STATE_PAUSED,
            STATE_PLAYING,
            STATE_IDLE,
        },
        STATE_ON,
        STATE_OFF,
    )
Test if state significantly changed.
def async_check_significant_change( hass: HomeAssistant, old_state: str, old_attrs: dict, new_state: str, new_attrs: dict, **kwargs: Any, ) -> bool | None: """Test if state significantly changed.""" if old_state != new_state: return True old_attrs_s = set( {k: v for k, v in old_attrs.items() if k in SIGNIFICANT_ATTRIBUTES}.items() ) new_attrs_s = set( {k: v for k, v in new_attrs.items() if k in SIGNIFICANT_ATTRIBUTES}.items() ) changed_attrs: set[str] = {item[0] for item in old_attrs_s ^ new_attrs_s} for attr_name in changed_attrs: if attr_name != ATTR_MEDIA_VOLUME_LEVEL: return True old_attr_value = old_attrs.get(attr_name) new_attr_value = new_attrs.get(attr_name) if new_attr_value is None or not check_valid_float(new_attr_value): # New attribute value is invalid, ignore it continue if old_attr_value is None or not check_valid_float(old_attr_value): # Old attribute value was invalid, we should report again return True if check_absolute_change(old_attr_value, new_attr_value, 0.1): return True # no significant attribute change detected return False
Return true if the specified media player entity_id is on. Check all media players if no entity_id is specified.
def is_on(hass: HomeAssistant, entity_id: str | None = None) -> bool:
    """Return true if the specified media player entity_id is on.

    Check all media players if no entity_id is specified.
    """
    entity_ids = [entity_id] if entity_id else hass.states.entity_ids(DOMAIN)
    return any(
        not hass.states.is_state(entity_id, MediaPlayerState.OFF)
        for entity_id in entity_ids
    )
Create validator that renames keys. Necessary because the service schema names do not match the command parameters. Async friendly.
def _rename_keys(**keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
    """Create validator that renames keys.

    Necessary because the service schema names do not match the command parameters.

    Async friendly.
    """

    def rename(value: dict[str, Any]) -> dict[str, Any]:
        for to_key, from_key in keys.items():
            if from_key in value:
                value[to_key] = value.pop(from_key)
        return value

    return rename
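A quick sketch of how the returned validator behaves; the key names below are invented for illustration:

rename = _rename_keys(position="seek_position")
assert rename({"seek_position": 10}) == {"position": 10}  # from_key is popped, to_key is set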
Create hash for media image url.
def _url_hash(url: str) -> str:
    """Create hash for media image url."""
    return hashlib.sha256(url.encode("utf-8")).hexdigest()[:16]
Set up local media source.
def async_setup(hass: HomeAssistant) -> None:
    """Set up local media source."""
    source = LocalSource(hass)
    hass.data[DOMAIN][DOMAIN] = source
    hass.http.register_view(LocalMediaView(hass, source))
    hass.http.register_view(UploadMediaView(hass, source))
    websocket_api.async_register_command(hass, websocket_remove_media)
Test if identifier is a media source.
def is_media_source_id(media_content_id: str) -> bool:
    """Test if identifier is a media source."""
    return URI_SCHEME_REGEX.match(media_content_id) is not None
Generate a media source ID.
def generate_media_source_id(domain: str, identifier: str) -> str:
    """Generate a media source ID."""
    uri = f"{URI_SCHEME}{domain or ''}"
    if identifier:
        uri += f"/{identifier}"
    return uri
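An illustrative call, assuming URI_SCHEME is the "media-source://" prefix used by media sources (the path is invented):

generate_media_source_id("local", "media/song.mp3")
# -> "media-source://local/media/song.mp3"
is_media_source_id("media-source://local/media/song.mp3")  # True, given a matching URI_SCHEME_REGEX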
Return media item.
def _get_media_item(
    hass: HomeAssistant, media_content_id: str | None, target_media_player: str | None
) -> MediaSourceItem:
    """Return media item."""
    if media_content_id:
        item = MediaSourceItem.from_uri(hass, media_content_id, target_media_player)
    else:
        # We default to our own domain if it's the only one registered
        domain = None if len(hass.data[DOMAIN]) > 1 else DOMAIN
        return MediaSourceItem(hass, domain, "", target_media_player)

    if item.domain is not None and item.domain not in hass.data[DOMAIN]:
        raise ValueError("Unknown media source")

    return item
Get descriptions for valves.
def get_entities_for_valves(
    coordinator: MelnorDataUpdateCoordinator,
    descriptions: list[T],
    function: Callable[
        [Valve, T],
        CoordinatorEntity[MelnorDataUpdateCoordinator],
    ],
) -> list[CoordinatorEntity[MelnorDataUpdateCoordinator]]:
    """Get descriptions for valves."""
    entities: list[CoordinatorEntity[MelnorDataUpdateCoordinator]] = []

    # This device may not have 4 valves total, but the library will only expose
    # the right number of valves
    for i in range(1, 5):
        valve = coordinator.data[f"zone{i}"]
        if valve is not None:
            entities.extend(
                function(valve, description) for description in descriptions
            )

    return entities
Calculate the number of minutes left in the current watering cycle.
def watering_seconds_left(valve: Valve) -> datetime | None:
    """Calculate the number of minutes left in the current watering cycle."""
    if valve.is_watering is not True or dt_util.now() > dt_util.utc_from_timestamp(
        valve.watering_end_time
    ):
        return None

    return dt_util.utc_from_timestamp(valve.watering_end_time)
Return the value of the next_cycle date, only if the cycle is enabled.
def next_cycle(valve: Valve) -> datetime | None:
    """Return the value of the next_cycle date, only if the cycle is enabled."""
    if valve.schedule_enabled is True:
        return valve.next_cycle

    return None
Get the MessageBird notification service.
def get_service(
    hass: HomeAssistant,
    config: ConfigType,
    discovery_info: DiscoveryInfoType | None = None,
) -> MessageBirdNotificationService | None:
    """Get the MessageBird notification service."""
    client = messagebird.Client(config[CONF_API_KEY])
    try:
        # validates the api key
        client.balance()
    except messagebird.client.ErrorException:
        _LOGGER.error("The specified MessageBird API key is invalid")
        return None

    return MessageBirdNotificationService(config.get(CONF_SENDER), client)
Return a set of configured met.no instances.
def configured_instances(hass: HomeAssistant) -> set[str]:
    """Return a set of configured met.no instances."""
    entries = []
    for entry in hass.config_entries.async_entries(DOMAIN):
        if entry.data.get("track_home"):
            entries.append("home")
            continue
        entries.append(
            f"{entry.data.get(CONF_LATITUDE)}-{entry.data.get(CONF_LONGITUDE)}"
        )
    return set(entries)
Get a schema with default values.
def _get_data_schema( hass: HomeAssistant, config_entry: ConfigEntry | None = None ) -> vol.Schema: """Get a schema with default values.""" # If tracking home or no config entry is passed in, default value come from Home location if config_entry is None or config_entry.data.get(CONF_TRACK_HOME, False): return vol.Schema( { vol.Required(CONF_NAME, default=HOME_LOCATION_NAME): str, vol.Required(CONF_LATITUDE, default=hass.config.latitude): cv.latitude, vol.Required( CONF_LONGITUDE, default=hass.config.longitude ): cv.longitude, vol.Required( CONF_ELEVATION, default=hass.config.elevation ): NumberSelector( NumberSelectorConfig( mode=NumberSelectorMode.BOX, unit_of_measurement=UnitOfLength.METERS, ) ), } ) # Not tracking home, default values come from config entry return vol.Schema( { vol.Required(CONF_NAME, default=config_entry.data.get(CONF_NAME)): str, vol.Required( CONF_LATITUDE, default=config_entry.data.get(CONF_LATITUDE) ): cv.latitude, vol.Required( CONF_LONGITUDE, default=config_entry.data.get(CONF_LONGITUDE) ): cv.longitude, vol.Required( CONF_ELEVATION, default=config_entry.data.get(CONF_ELEVATION) ): NumberSelector( NumberSelectorConfig( mode=NumberSelectorMode.BOX, unit_of_measurement=UnitOfLength.METERS, ) ), } )
Calculate unique ID.
def _calculate_unique_id(config: MappingProxyType[str, Any], hourly: bool) -> str:
    """Calculate unique ID."""
    name_appendix = ""
    if hourly:
        name_appendix = "-hourly"
    if config.get(CONF_TRACK_HOME):
        return f"home{name_appendix}"

    return f"{config[CONF_LATITUDE]}-{config[CONF_LONGITUDE]}{name_appendix}"
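Two illustrative calls (coordinates invented; plain dicts stand in for the MappingProxyType config):

_calculate_unique_id({CONF_TRACK_HOME: True}, hourly=True)                       # -> "home-hourly"
_calculate_unique_id({CONF_LATITUDE: 59.91, CONF_LONGITUDE: 10.75}, hourly=False)  # -> "59.91-10.75"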
Return condition from dict CONDITIONS_MAP.
def format_condition(condition: str) -> str:
    """Return condition from dict CONDITIONS_MAP."""
    for key, value in CONDITIONS_MAP.items():
        if condition in value:
            return key
    return condition
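CONDITIONS_MAP maps each Home Assistant condition key to the set of provider-specific values it covers; a hypothetical fragment illustrates the reverse lookup (the real table lives in the integration's constants):

CONDITIONS_MAP = {  # hypothetical fragment, not the integration's real mapping
    "cloudy": {"cloudy", "partlycloudy_night"},
    "rainy": {"lightrain", "rain"},
}
assert format_condition("lightrain") == "rainy"
assert format_condition("unmapped") == "unmapped"  # unknown values fall through unchanged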
Set up the MeteoAlarm binary sensor platform.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the MeteoAlarm binary sensor platform."""
    country = config[CONF_COUNTRY]
    province = config[CONF_PROVINCE]
    language = config[CONF_LANGUAGE]
    name = config[CONF_NAME]

    try:
        api = Meteoalert(country, province, language)
    except KeyError:
        _LOGGER.error("Wrong country digits or province name")
        return

    add_entities([MeteoAlertBinarySensor(api, name)], True)
Return condition from dict CONDITION_MAP.
def format_condition(condition):
    """Return condition from dict CONDITION_MAP."""
    if condition in CONDITION_MAP:
        return CONDITION_MAP[condition]
    if isinstance(condition, Condition):
        return condition.value
    return condition
Search the first not None value in the first forecast elements.
def _find_first_probability_forecast_not_null(
    probability_forecast: list, path: list
) -> int | None:
    """Search the first not None value in the first forecast elements."""
    for forecast in probability_forecast[0:3]:
        if forecast[path[1]][path[2]] is not None:
            return forecast[path[1]][path[2]]

    # Default return value if no value is found
    return None
Return condition from dict CONDITION_MAP.
def format_condition(condition: str):
    """Return condition from dict CONDITION_MAP."""
    return CONDITION_MAP.get(condition, condition)
Fetch site information from Datapoint API.
def fetch_site(
    connection: datapoint.Manager, latitude: float, longitude: float
) -> Site | None:
    """Fetch site information from Datapoint API."""
    try:
        return connection.get_nearest_forecast_site(
            latitude=latitude, longitude=longitude
        )
    except datapoint.exceptions.APIException as err:
        _LOGGER.error("Received error from Met Office Datapoint: %s", err)
        return None
Fetch weather and forecast from Datapoint API.
def fetch_data(connection: datapoint.Manager, site: Site, mode: str) -> MetOfficeData:
    """Fetch weather and forecast from Datapoint API."""
    try:
        forecast = connection.get_forecast_for_site(site.location_id, mode)
    except (ValueError, datapoint.exceptions.APIException) as err:
        _LOGGER.error("Check Met Office connection: %s", err.args)
        raise UpdateFailed from err

    time_now = utcnow()
    return MetOfficeData(
        now=forecast.now(),
        forecast=[
            timestep
            for day in forecast.days
            for timestep in day.timesteps
            if timestep.date > time_now
            and (
                mode == MODE_3HOURLY or timestep.date.hour > 6
            )  # ensures only one result per day in MODE_DAILY
        ],
        site=site,
    )
Calculate unique ID.
def _calculate_unique_id(coordinates: str, use_3hourly: bool) -> str:
    """Calculate unique ID."""
    if use_3hourly:
        return coordinates
    return f"{coordinates}_{MODE_DAILY}"
Return device registry information.
def get_device_info(coordinates: str, name: str) -> DeviceInfo:
    """Return device registry information."""
    return DeviceInfo(
        entry_type=dr.DeviceEntryType.SERVICE,
        identifiers={(DOMAIN, coordinates)},
        manufacturer="Met Office",
        name=f"Met Office {name}",
    )
Map the conditions provided by the weather API to those supported by the frontend.
def format_condition(condition: str | None) -> str | None:
    """Map the conditions provided by the weather API to those supported by the frontend."""
    if condition is not None:
        for key, value in CONDITION_MAP.items():
            if condition in value:
                return key
    return condition
Calculate unique ID.
def _calculate_unique_id(config: MappingProxyType[str, Any], hourly: bool) -> str:
    """Calculate unique ID."""
    name_appendix = ""
    if hourly:
        name_appendix = "-hourly"

    return f"{config[CONF_LATITUDE]}-{config[CONF_LONGITUDE]}{name_appendix}"
Set up mFi sensors.
def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up mFi sensors.""" host = config.get(CONF_HOST) username = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) use_tls = config.get(CONF_SSL) verify_tls = config.get(CONF_VERIFY_SSL) default_port = 6443 if use_tls else 6080 port = int(config.get(CONF_PORT, default_port)) try: client = MFiClient( host, username, password, port=port, use_tls=use_tls, verify=verify_tls ) except (FailedToLogin, requests.exceptions.ConnectionError) as ex: _LOGGER.error("Unable to connect to mFi: %s", str(ex)) return add_entities( MfiSensor(port, hass) for device in client.get_devices() for port in device.ports.values() if port.model in SENSOR_MODELS )
Set up mFi sensors.
def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up mFi sensors.""" host = config.get(CONF_HOST) username = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) use_tls = config[CONF_SSL] verify_tls = config.get(CONF_VERIFY_SSL) default_port = 6443 if use_tls else 6080 port = int(config.get(CONF_PORT, default_port)) try: client = MFiClient( host, username, password, port=port, use_tls=use_tls, verify=verify_tls ) except (FailedToLogin, requests.exceptions.ConnectionError) as ex: _LOGGER.error("Unable to connect to mFi: %s", str(ex)) return add_entities( MfiSwitch(port) for device in client.get_devices() for port in device.ports.values() if port.model in SWITCH_MODELS )
Set up Microsoft speech component.
def get_engine(hass, config, discovery_info=None):
    """Set up Microsoft speech component."""
    return MicrosoftProvider(
        config[CONF_API_KEY],
        config[CONF_LANG],
        config[CONF_GENDER],
        config[CONF_TYPE],
        config[CONF_RATE],
        config[CONF_VOLUME],
        config[CONF_PITCH],
        config[CONF_CONTOUR],
        config[CONF_REGION],
    )
Validate face attributes.
def validate_attributes(list_attributes):
    """Validate face attributes."""
    for attr in list_attributes:
        if attr not in SUPPORTED_ATTRIBUTES:
            raise vol.Invalid(f"Invalid attribute {attr}")
    return list_attributes
Update tracked device state from the hub.
def update_items(
    coordinator: MikrotikDataUpdateCoordinator,
    async_add_entities: AddEntitiesCallback,
    tracked: dict[str, MikrotikDataUpdateCoordinatorTracker],
) -> None:
    """Update tracked device state from the hub."""
    new_tracked: list[MikrotikDataUpdateCoordinatorTracker] = []
    for mac, device in coordinator.api.devices.items():
        if mac not in tracked:
            tracked[mac] = MikrotikDataUpdateCoordinatorTracker(device, coordinator)
            new_tracked.append(tracked[mac])

    async_add_entities(new_tracked)
Connect to Mikrotik hub.
def get_api(entry: dict[str, Any]) -> librouteros.Api:
    """Connect to Mikrotik hub."""
    _LOGGER.debug("Connecting to Mikrotik hub [%s]", entry[CONF_HOST])

    _login_method = (login_plain, login_token)
    kwargs = {"login_methods": _login_method, "port": entry["port"], "encoding": "utf8"}

    if entry[CONF_VERIFY_SSL]:
        ssl_context = ssl.create_default_context()
        ssl_context.check_hostname = False
        ssl_context.verify_mode = ssl.CERT_NONE
        _ssl_wrapper = ssl_context.wrap_socket
        kwargs["ssl_wrapper"] = _ssl_wrapper

    try:
        api = librouteros.connect(
            entry[CONF_HOST],
            entry[CONF_USERNAME],
            entry[CONF_PASSWORD],
            **kwargs,
        )
    except (
        librouteros.exceptions.LibRouterosError,
        OSError,
        TimeoutError,
    ) as api_error:
        _LOGGER.error("Mikrotik %s error: %s", entry[CONF_HOST], api_error)
        if "invalid user name or password" in str(api_error):
            raise LoginError from api_error
        raise CannotConnect from api_error

    _LOGGER.debug("Connected to %s successfully", entry[CONF_HOST])
    return api
Return players list as extra state attributes, if available.
def get_extra_state_attributes_players_list(
    data: MinecraftServerData,
) -> dict[str, list[str]]:
    """Return players list as extra state attributes, if available."""
    extra_state_attributes: dict[str, Any] = {}
    players_list = data.players_list

    if players_list is not None and len(players_list) != 0:
        extra_state_attributes[ATTR_PLAYERS_LIST] = players_list

    return extra_state_attributes
Migrate the unique ID of an entity to the new format.
def _migrate_entity_unique_id(entity_entry: er.RegistryEntry) -> dict[str, Any]: """Migrate the unique ID of an entity to the new format.""" # Different variants of unique IDs are available in version 1: # 1) SRV record: '<host>-srv-<entity_type>' # 2) Host & port: '<host>-<port>-<entity_type>' # 3) IP address & port: '<mac_address>-<port>-<entity_type>' unique_id_pieces = entity_entry.unique_id.split("-") entity_type = unique_id_pieces[2] # Handle bug in version 1: Entity type names were used instead of # keys (e.g. "Protocol Version" instead of "protocol_version"). new_entity_type = entity_type.lower() new_entity_type = new_entity_type.replace(" ", "_") # Special case 'MOTD': Name and key differs. if new_entity_type == "world_message": new_entity_type = KEY_MOTD # Special case 'latency_time': Renamed to 'latency'. if new_entity_type == "latency_time": new_entity_type = KEY_LATENCY new_unique_id = f"{entity_entry.config_entry_id}-{new_entity_type}" _LOGGER.debug( "Migrating entity unique ID from %s to %s", entity_entry.unique_id, new_unique_id, ) return {"new_unique_id": new_unique_id}
Normalize object metadata by stripping the prefix.
def normalize_metadata(metadata: dict) -> dict:
    """Normalize object metadata by stripping the prefix."""
    new_metadata = {}
    for meta_key, meta_value in metadata.items():
        if not (match := _METADATA_RE.match(meta_key)):
            continue

        new_metadata[match.group(1).lower()] = meta_value

    return new_metadata
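Assuming _METADATA_RE captures whatever follows the S3 user-metadata prefix (something like the hypothetical pattern below; the integration defines its own), the helper behaves roughly like this:

import re

_METADATA_RE = re.compile(r"x-amz-meta-(.*)", re.IGNORECASE)  # hypothetical, for illustration

normalize_metadata({"X-Amz-Meta-Camera": "front_door", "Content-Type": "image/jpeg"})
# -> {"camera": "front_door"}; keys without the prefix are dropped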
Create Minio client.
def create_minio_client(
    endpoint: str, access_key: str, secret_key: str, secure: bool
) -> Minio:
    """Create Minio client."""
    return Minio(
        endpoint=endpoint, access_key=access_key, secret_key=secret_key, secure=secure
    )
Start listening to minio events. Copied from minio-py.
def get_minio_notification_response(
    minio_client, bucket_name: str, prefix: str, suffix: str, events: list[str]
):
    """Start listening to minio events. Copied from minio-py."""
    query = {"prefix": prefix, "suffix": suffix, "events": events}
    # pylint: disable-next=protected-access
    return minio_client._url_open(
        "GET", bucket_name=bucket_name, query=query, preload_content=False
    )
Iterate over file records of notification event. Most of the time it should still be only one record.
def iterate_objects(event):
    """Iterate over file records of notification event.

    Most of the time it should still be only one record.
    """
    records = event.get("Records", [])

    for record in records:
        event_name = record.get("eventName")
        bucket = record.get("s3", {}).get("bucket", {}).get("name")
        key = record.get("s3", {}).get("object", {}).get("key")
        metadata = normalize_metadata(
            record.get("s3", {}).get("object", {}).get("userMetadata", {})
        )

        if not bucket or not key:
            _LOGGER.warning("Invalid bucket and/or key, %s, %s", bucket, key)
            continue

        key = unquote(key)

        yield event_name, bucket, key, metadata
Set up MinioClient and event listeners.
def setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up MinioClient and event listeners.""" conf = config[DOMAIN] host = conf[CONF_HOST] port = conf[CONF_PORT] access_key = conf[CONF_ACCESS_KEY] secret_key = conf[CONF_SECRET_KEY] secure = conf[CONF_SECURE] queue_listener = QueueListener(hass) queue = queue_listener.queue hass.bus.listen_once(EVENT_HOMEASSISTANT_START, queue_listener.start_handler) hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, queue_listener.stop_handler) def _setup_listener(listener_conf): bucket = listener_conf[CONF_LISTEN_BUCKET] prefix = listener_conf[CONF_LISTEN_PREFIX] suffix = listener_conf[CONF_LISTEN_SUFFIX] events = listener_conf[CONF_LISTEN_EVENTS] minio_listener = MinioListener( queue, get_minio_endpoint(host, port), access_key, secret_key, secure, bucket, prefix, suffix, events, ) hass.bus.listen_once(EVENT_HOMEASSISTANT_START, minio_listener.start_handler) hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, minio_listener.stop_handler) for listen_conf in conf[CONF_LISTEN]: _setup_listener(listen_conf) minio_client = create_minio_client( get_minio_endpoint(host, port), access_key, secret_key, secure ) def _render_service_value(service, key): value = service.data[key] value.hass = hass return value.async_render(parse_result=False) def put_file(service: ServiceCall) -> None: """Upload file service.""" bucket = _render_service_value(service, ATTR_BUCKET) key = _render_service_value(service, ATTR_KEY) file_path = _render_service_value(service, ATTR_FILE_PATH) if not hass.config.is_allowed_path(file_path): raise ValueError(f"Invalid file_path {file_path}") minio_client.fput_object(bucket, key, file_path) def get_file(service: ServiceCall) -> None: """Download file service.""" bucket = _render_service_value(service, ATTR_BUCKET) key = _render_service_value(service, ATTR_KEY) file_path = _render_service_value(service, ATTR_FILE_PATH) if not hass.config.is_allowed_path(file_path): raise ValueError(f"Invalid file_path {file_path}") minio_client.fget_object(bucket, key, file_path) def remove_file(service: ServiceCall) -> None: """Delete file service.""" bucket = _render_service_value(service, ATTR_BUCKET) key = _render_service_value(service, ATTR_KEY) minio_client.remove_object(bucket, key) hass.services.register(DOMAIN, "put", put_file, schema=BUCKET_KEY_FILE_SCHEMA) hass.services.register(DOMAIN, "get", get_file, schema=BUCKET_KEY_FILE_SCHEMA) hass.services.register(DOMAIN, "remove", remove_file, schema=BUCKET_KEY_SCHEMA) return True
Create minio endpoint from host and port.
def get_minio_endpoint(host: str, port: int) -> str:
    """Create minio endpoint from host and port."""
    return f"{host}:{port}"
Calculate min value, honoring unknown states.
def calc_min(sensor_values: list[tuple[str, Any]]) -> tuple[str | None, float | None]:
    """Calculate min value, honoring unknown states."""
    val: float | None = None
    entity_id: str | None = None
    for sensor_id, sensor_value in sensor_values:
        if sensor_value not in [STATE_UNKNOWN, STATE_UNAVAILABLE] and (
            val is None or val > sensor_value
        ):
            entity_id, val = sensor_id, sensor_value
    return entity_id, val
Calculate max value, honoring unknown states.
def calc_max(sensor_values: list[tuple[str, Any]]) -> tuple[str | None, float | None]:
    """Calculate max value, honoring unknown states."""
    val: float | None = None
    entity_id: str | None = None
    for sensor_id, sensor_value in sensor_values:
        if sensor_value not in [STATE_UNKNOWN, STATE_UNAVAILABLE] and (
            val is None or val < sensor_value
        ):
            entity_id, val = sensor_id, sensor_value
    return entity_id, val
Calculate mean value, honoring unknown states.
def calc_mean(sensor_values: list[tuple[str, Any]], round_digits: int) -> float | None:
    """Calculate mean value, honoring unknown states."""
    result = [
        sensor_value
        for _, sensor_value in sensor_values
        if sensor_value not in [STATE_UNKNOWN, STATE_UNAVAILABLE]
    ]

    if not result:
        return None

    value: float = round(statistics.mean(result), round_digits)
    return value
Calculate median value, honoring unknown states.
def calc_median(
    sensor_values: list[tuple[str, Any]], round_digits: int
) -> float | None:
    """Calculate median value, honoring unknown states."""
    result = [
        sensor_value
        for _, sensor_value in sensor_values
        if sensor_value not in [STATE_UNKNOWN, STATE_UNAVAILABLE]
    ]

    if not result:
        return None

    value: float = round(statistics.median(result), round_digits)
    return value
Calculate range value, honoring unknown states.
def calc_range(sensor_values: list[tuple[str, Any]], round_digits: int) -> float | None:
    """Calculate range value, honoring unknown states."""
    result = [
        sensor_value
        for _, sensor_value in sensor_values
        if sensor_value not in [STATE_UNKNOWN, STATE_UNAVAILABLE]
    ]

    if not result:
        return None

    value: float = round(max(result) - min(result), round_digits)
    return value
Calculate a sum of values, not honoring unknown states.
def calc_sum(sensor_values: list[tuple[str, Any]], round_digits: int) -> float | None:
    """Calculate a sum of values, not honoring unknown states."""
    result = 0
    for _, sensor_value in sensor_values:
        if sensor_value in [STATE_UNKNOWN, STATE_UNAVAILABLE]:
            return None
        result += sensor_value

    value: float = round(result, round_digits)
    return value
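A small sketch contrasting the helpers above (entity IDs and values invented; STATE_UNKNOWN is Home Assistant's "unknown" constant):

values = [("sensor.a", 2.0), ("sensor.b", STATE_UNKNOWN), ("sensor.c", 4.0)]

calc_mean(values, 2)   # -> 3.0, the unknown reading is simply skipped
calc_sum(values, 2)    # -> None, a single unknown reading invalidates the sum
calc_min(values)       # -> ("sensor.a", 2.0)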
Return MJPEG IP Camera schema.
def async_get_schema( defaults: dict[str, Any] | MappingProxyType[str, Any], show_name: bool = False ) -> vol.Schema: """Return MJPEG IP Camera schema.""" schema = { vol.Required(CONF_MJPEG_URL, default=defaults.get(CONF_MJPEG_URL)): str, vol.Optional( CONF_STILL_IMAGE_URL, description={"suggested_value": defaults.get(CONF_STILL_IMAGE_URL)}, ): str, vol.Optional( CONF_USERNAME, description={"suggested_value": defaults.get(CONF_USERNAME)}, ): str, vol.Optional( CONF_PASSWORD, default=defaults.get(CONF_PASSWORD, ""), ): str, vol.Optional( CONF_VERIFY_SSL, default=defaults.get(CONF_VERIFY_SSL, True), ): bool, } if show_name: schema = { vol.Required(CONF_NAME, default=defaults.get(CONF_NAME)): str, **schema, } return vol.Schema(schema)
Test if the given setting works as expected.
def validate_url( url: str, username: str | None, password: str, verify_ssl: bool, authentication: str = HTTP_BASIC_AUTHENTICATION, ) -> str: """Test if the given setting works as expected.""" auth: HTTPDigestAuth | HTTPBasicAuth | None = None if username and password: if authentication == HTTP_DIGEST_AUTHENTICATION: auth = HTTPDigestAuth(username, password) else: auth = HTTPBasicAuth(username, password) response = requests.get( url, auth=auth, stream=True, timeout=10, verify=verify_ssl, ) if response.status_code == HTTPStatus.UNAUTHORIZED: # If unauthorized, try again using digest auth if authentication == HTTP_BASIC_AUTHENTICATION: return validate_url( url, username, password, verify_ssl, HTTP_DIGEST_AUTHENTICATION ) raise InvalidAuth response.raise_for_status() response.close() return authentication
Filter header errors from urllib3 due to a urllib3 bug.
def filter_urllib3_logging() -> None:
    """Filter header errors from urllib3 due to a urllib3 bug."""
    urllib3_logger = logging.getLogger("urllib3.connectionpool")
    if not any(isinstance(x, NoHeaderErrorFilter) for x in urllib3_logger.filters):
        urllib3_logger.addFilter(NoHeaderErrorFilter())
Convert a device key to an entity key.
def _device_key_to_bluetooth_entity_key(
    device_key: DeviceKey,
) -> PassiveBluetoothEntityKey:
    """Convert a device key to an entity key."""
    return PassiveBluetoothEntityKey(device_key.key, device_key.device_id)
Convert a sensor update to a bluetooth data update.
def sensor_update_to_bluetooth_data_update(
    sensor_update: SensorUpdate,
) -> PassiveBluetoothDataUpdate:
    """Convert a sensor update to a bluetooth data update."""
    return PassiveBluetoothDataUpdate(
        devices={
            device_id: sensor_device_info_to_hass_device_info(device_info)
            for device_id, device_info in sensor_update.devices.items()
        },
        entity_descriptions={
            _device_key_to_bluetooth_entity_key(device_key): SENSOR_DESCRIPTIONS[
                (description.device_class, description.native_unit_of_measurement)
            ]
            for device_key, description in sensor_update.entity_descriptions.items()
            if description.device_class and description.native_unit_of_measurement
        },
        entity_data={
            _device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
            for device_key, sensor_values in sensor_update.entity_values.items()
        },
        entity_names={
            _device_key_to_bluetooth_entity_key(device_key): sensor_values.name
            for device_key, sensor_values in sensor_update.entity_values.items()
        },
    )
Return decryption function and length of key. Async friendly.
def setup_decrypt(
    key_encoder: type[RawEncoder | HexEncoder],
) -> Callable[[bytes, bytes], bytes]:
    """Return decryption function and length of key.

    Async friendly.
    """

    def decrypt(ciphertext: bytes, key: bytes) -> bytes:
        """Decrypt ciphertext using key."""
        return SecretBox(key, encoder=key_encoder).decrypt(
            ciphertext, encoder=Base64Encoder
        )

    return decrypt
Return encryption function and length of key. Async friendly.
def setup_encrypt(
    key_encoder: type[RawEncoder | HexEncoder],
) -> Callable[[bytes, bytes], bytes]:
    """Return encryption function and length of key.

    Async friendly.
    """

    def encrypt(plaintext: bytes, key: bytes) -> bytes:
        """Encrypt plaintext using key."""
        return SecretBox(key, encoder=key_encoder).encrypt(
            plaintext, encoder=Base64Encoder
        )

    return encrypt
Decrypt encrypted payload.
def _decrypt_payload_helper(
    key: str | bytes,
    ciphertext: bytes,
    key_bytes: bytes,
    key_encoder: type[RawEncoder | HexEncoder],
) -> JsonValueType | None:
    """Decrypt encrypted payload."""
    try:
        decrypt = setup_decrypt(key_encoder)
    except OSError:
        _LOGGER.warning("Ignoring encrypted payload because libsodium not installed")
        return None

    if key is None:
        _LOGGER.warning("Ignoring encrypted payload because no decryption key known")
        return None

    msg_bytes = decrypt(ciphertext, key_bytes)
    message = json_loads(msg_bytes)
    _LOGGER.debug("Successfully decrypted mobile_app payload")
    return message
Decrypt encrypted payload.
def decrypt_payload(key: str, ciphertext: bytes) -> JsonValueType | None:
    """Decrypt encrypted payload."""
    return _decrypt_payload_helper(key, ciphertext, key.encode("utf-8"), HexEncoder)
Convert legacy encryption key.
def _convert_legacy_encryption_key(key: str) -> bytes:
    """Convert legacy encryption key."""
    keylen = SecretBox.KEY_SIZE
    key_bytes = key.encode("utf-8")
    key_bytes = key_bytes[:keylen]
    return key_bytes.ljust(keylen, b"\0")
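An illustration of the truncate-or-pad behaviour, assuming PyNaCl's SecretBox.KEY_SIZE of 32 bytes (the sample key is invented):

key = _convert_legacy_encryption_key("hunter2")
assert len(key) == 32
assert key == b"hunter2" + b"\0" * 25  # short keys are NUL-padded; overlong keys are truncated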
Decrypt encrypted payload.
def decrypt_payload_legacy(key: str, ciphertext: bytes) -> JsonValueType | None:
    """Decrypt encrypted payload."""
    return _decrypt_payload_helper(
        key, ciphertext, _convert_legacy_encryption_key(key), RawEncoder
    )
Generate a context from a request.
def registration_context(registration: Mapping[str, Any]) -> Context:
    """Generate a context from a request."""
    return Context(user_id=registration[CONF_USER_ID])
Return a Response with empty JSON object and a 200.
def empty_okay_response(
    headers: dict | None = None, status: HTTPStatus = HTTPStatus.OK
) -> Response:
    """Return a Response with empty JSON object and a 200."""
    return Response(
        text="{}", status=status, content_type=CONTENT_TYPE_JSON, headers=headers
    )
Return an error Response.
def error_response(
    code: str,
    message: str,
    status: HTTPStatus = HTTPStatus.BAD_REQUEST,
    headers: dict | None = None,
) -> Response:
    """Return an error Response."""
    return json_response(
        {"success": False, "error": {"code": code, "message": message}},
        status=status,
        headers=headers,
    )
Return a registration without sensitive values.
def safe_registration(registration: dict) -> dict:
    """Return a registration without sensitive values."""
    # Sensitive values: webhook_id, secret, cloudhook_url
    return {
        ATTR_APP_DATA: registration[ATTR_APP_DATA],
        ATTR_APP_ID: registration[ATTR_APP_ID],
        ATTR_APP_NAME: registration[ATTR_APP_NAME],
        ATTR_APP_VERSION: registration[ATTR_APP_VERSION],
        ATTR_DEVICE_NAME: registration[ATTR_DEVICE_NAME],
        ATTR_MANUFACTURER: registration[ATTR_MANUFACTURER],
        ATTR_MODEL: registration[ATTR_MODEL],
        ATTR_OS_VERSION: registration[ATTR_OS_VERSION],
        ATTR_SUPPORTS_ENCRYPTION: registration[ATTR_SUPPORTS_ENCRYPTION],
    }
Return a clean object containing things that should be saved.
def savable_state(hass: HomeAssistant) -> dict:
    """Return a clean object containing things that should be saved."""
    return {
        DATA_DELETED_IDS: hass.data[DOMAIN][DATA_DELETED_IDS],
    }
Return an encrypted response if the registration supports it.
def webhook_response(
    data: Any,
    *,
    registration: Mapping[str, Any],
    status: HTTPStatus = HTTPStatus.OK,
    headers: Mapping[str, str] | None = None,
) -> Response:
    """Return an encrypted response if registration supports it."""
    json_data = json_bytes(data)

    if registration[ATTR_SUPPORTS_ENCRYPTION]:
        encrypt = setup_encrypt(
            HexEncoder if ATTR_NO_LEGACY_ENCRYPTION in registration else RawEncoder
        )

        if ATTR_NO_LEGACY_ENCRYPTION in registration:
            key: bytes = registration[CONF_SECRET]
        else:
            key = _convert_legacy_encryption_key(registration[CONF_SECRET])

        enc_data = encrypt(json_data, key).decode("utf-8")
        json_data = json_bytes({"encrypted": True, "encrypted_data": enc_data})

    return Response(
        body=json_data, status=status, content_type=CONTENT_TYPE_JSON, headers=headers
    )
Return the device info for this registration.
def device_info(registration: dict) -> DeviceInfo:
    """Return the device info for this registration."""
    return DeviceInfo(
        identifiers={(DOMAIN, registration[ATTR_DEVICE_ID])},
        manufacturer=registration[ATTR_MANUFACTURER],
        model=registration[ATTR_MODEL],
        name=registration[ATTR_DEVICE_NAME],
        sw_version=registration[ATTR_OS_VERSION],
    )
Describe logbook events.
def async_describe_events( hass: HomeAssistant, async_describe_event: Callable[[str, str, Callable[[Event], dict[str, str]]], None], ) -> None: """Describe logbook events.""" @callback def async_describe_zone_event(event: Event) -> dict[str, str]: """Describe mobile_app logbook event.""" data = event.data event_description = EVENT_TO_DESCRIPTION[event.event_type] zone_entity_id = data.get(ATTR_ZONE) source_device_name = data.get( ATTR_SOURCE_DEVICE_NAME, data.get(ATTR_SOURCE_DEVICE_ID) ) zone_name = None zone_icon = None if zone_entity_id and (zone_state := hass.states.get(zone_entity_id)): zone_name = zone_state.attributes.get(ATTR_FRIENDLY_NAME) zone_icon = zone_state.attributes.get(ATTR_ICON) description = { LOGBOOK_ENTRY_NAME: source_device_name, LOGBOOK_ENTRY_MESSAGE: f"{event_description} {zone_name or zone_entity_id}", LOGBOOK_ENTRY_ICON: zone_icon or "mdi:crosshairs-gps", } if zone_entity_id: description[LOGBOOK_ENTRY_ENTITY_ID] = zone_entity_id return description async_describe_event(DOMAIN, IOS_EVENT_ZONE_ENTERED, async_describe_zone_event) async_describe_event(DOMAIN, IOS_EVENT_ZONE_EXITED, async_describe_zone_event)
Return a dictionary of push enabled registrations.
def push_registrations(hass):
    """Return a dictionary of push enabled registrations."""
    targets = {}

    for webhook_id, entry in hass.data[DOMAIN][DATA_CONFIG_ENTRIES].items():
        if not supports_push(hass, webhook_id):
            continue

        targets[entry.data[ATTR_DEVICE_NAME]] = webhook_id

    return targets
Output rate limit log line at given level.
def log_rate_limits(hass, device_name, resp, level=logging.INFO):
    """Output rate limit log line at given level."""
    if ATTR_PUSH_RATE_LIMITS not in resp:
        return

    rate_limits = resp[ATTR_PUSH_RATE_LIMITS]
    resetsAt = rate_limits[ATTR_PUSH_RATE_LIMITS_RESETS_AT]
    resetsAtTime = dt_util.parse_datetime(resetsAt) - dt_util.utcnow()
    rate_limit_msg = (
        "mobile_app push notification rate limits for %s: "
        "%d sent, %d allowed, %d errors, "
        "resets in %s"
    )
    _LOGGER.log(
        level,
        rate_limit_msg,
        device_name,
        rate_limits[ATTR_PUSH_RATE_LIMITS_SUCCESSFUL],
        rate_limits[ATTR_PUSH_RATE_LIMITS_MAXIMUM],
        rate_limits[ATTR_PUSH_RATE_LIMITS_ERRORS],
        str(resetsAtTime).split(".", maxsplit=1)[0],
    )
Get webhook ID from device ID.
def webhook_id_from_device_id(hass: HomeAssistant, device_id: str) -> str | None:
    """Get webhook ID from device ID."""
    if DOMAIN not in hass.data:
        return None

    for cur_webhook_id, cur_device in hass.data[DOMAIN][DATA_DEVICES].items():
        if cur_device.id == device_id:
            return cur_webhook_id

    return None
Return whether push notifications are supported.
def supports_push(hass: HomeAssistant, webhook_id: str) -> bool:
    """Return whether push notifications are supported."""
    config_entry = hass.data[DOMAIN][DATA_CONFIG_ENTRIES][webhook_id]
    app_data = config_entry.data[ATTR_APP_DATA]
    return (
        ATTR_PUSH_TOKEN in app_data and ATTR_PUSH_URL in app_data
    ) or ATTR_PUSH_WEBSOCKET_CHANNEL in app_data
Return the notify service for this webhook ID.
def get_notify_service(hass: HomeAssistant, webhook_id: str) -> str | None:
    """Return the notify service for this webhook ID."""
    notify_service: MobileAppNotificationService = hass.data[DOMAIN][DATA_NOTIFY]

    for target_service, target_webhook_id in notify_service.registered_targets.items():
        if target_webhook_id == webhook_id:
            return target_service

    return None
Decorate a webhook function with a schema.
def validate_schema(schema):
    """Decorate a webhook function with a schema."""
    if isinstance(schema, dict):
        schema = vol.Schema(schema)

    def wrapper(func):
        """Wrap function so we validate schema."""

        @wraps(func)
        async def validate_and_run(hass, config_entry, data):
            """Validate input and call handler."""
            try:
                data = schema(data)
            except vol.Invalid as ex:
                err = vol.humanize.humanize_error(data, ex)
                _LOGGER.error("Received invalid webhook payload: %s", err)
                return empty_okay_response()

            return await func(hass, config_entry, data)

        return validate_and_run

    return wrapper
Return a cached template.
def _cached_template(template_str: str, hass: HomeAssistant) -> template.Template:
    """Return a cached template."""
    return template.Template(template_str, hass)
Validate we only set state class for sensors.
def _validate_state_class_sensor(value: dict[str, Any]) -> dict[str, Any]:
    """Validate we only set state class for sensors."""
    if (
        ATTR_SENSOR_STATE_CLASS in value
        and value[ATTR_SENSOR_TYPE] != ATTR_SENSOR_TYPE_SENSOR
    ):
        raise vol.Invalid("state_class only allowed for sensors")

    return value
Return a unique sensor ID.
def _gen_unique_id(webhook_id: str, sensor_unique_id: str) -> str:
    """Return a unique sensor ID."""
    return f"{webhook_id}_{sensor_unique_id}"
Extract the sensor unique ID from a combined unique ID.
def _extract_sensor_unique_id(webhook_id: str, unique_id: str) -> str:
    """Extract the sensor unique ID from a combined unique ID."""
    return unique_id[len(webhook_id) + 1 :]
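A round trip of the two helpers above (the webhook and sensor IDs are invented):

combined = _gen_unique_id("abc123", "battery_level")           # -> "abc123_battery_level"
assert _extract_sensor_unique_id("abc123", combined) == "battery_level"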
Set up the mobile app websocket API.
def async_setup_commands(hass):
    """Set up the mobile app websocket API."""
    websocket_api.async_register_command(hass, handle_push_notification_channel)
    websocket_api.async_register_command(hass, handle_push_notification_confirm)