response
stringlengths
1
33.1k
instruction
stringlengths
22
582k
Get thumbnail URL.
def get_thumbnail_url_full(
    coordinator: RokuDataUpdateCoordinator,
    is_internal: bool,
    get_browse_image_url: GetBrowseImageUrlType,
    media_content_type: str,
    media_content_id: str,
    media_image_id: str | None = None,
) -> str | None:
    """Get thumbnail URL."""
    if not is_internal:
        # External access always goes through the media-player image proxy.
        return get_browse_image_url(
            media_content_type,
            media_content_id,
            media_image_id,
        )
    # Internal access: only apps have a directly reachable icon URL.
    if media_content_type == MediaType.APP and media_content_id:
        return coordinator.roku.app_icon_url(media_content_id)
    return None
Create response payload for the provided media query.
def build_item_response(
    coordinator: RokuDataUpdateCoordinator,
    payload: dict,
    get_browse_image_url: GetBrowseImageUrlType,
) -> BrowseMedia | None:
    """Create response payload for the provided media query.

    Returns None when the requested search_type is not a supported
    container (apps or TV channels).
    """
    search_id = payload["search_id"]
    search_type = payload["search_type"]

    thumbnail = None
    title = None
    media = None
    children_media_class = None

    if search_type == MediaType.APPS:
        # Build one child dict per installed app.
        title = "Apps"
        media = [
            {"app_id": item.app_id, "title": item.name, "type": MediaType.APP}
            for item in coordinator.data.apps
        ]
        children_media_class = MediaClass.APP
    elif search_type == MediaType.CHANNELS:
        # Build one child dict per tuner channel.
        title = "TV Channels"
        media = [
            {
                "channel_number": channel.number,
                "title": format_channel_name(channel.number, channel.name),
                "type": MediaType.CHANNEL,
            }
            for channel in coordinator.data.channels
        ]
        children_media_class = MediaClass.CHANNEL

    # Unsupported search types produce no response.
    if title is None or media is None:
        return None

    return BrowseMedia(
        media_class=CONTAINER_TYPES_SPECIFIC_MEDIA_CLASS.get(
            search_type, MediaClass.DIRECTORY
        ),
        media_content_id=search_id,
        media_content_type=search_type,
        title=title,
        can_play=search_type in PLAYABLE_MEDIA_TYPES and search_id,
        can_expand=True,
        children=[
            item_payload(item, coordinator, get_browse_image_url) for item in media
        ],
        children_media_class=children_media_class,
        thumbnail=thumbnail,
    )
Create response payload for a single media item. Used by async_browse_media.
def item_payload(
    item: dict,
    coordinator: RokuDataUpdateCoordinator,
    get_browse_image_url: GetBrowseImageUrlType,
) -> BrowseMedia:
    """Create response payload for a single media item.

    Used by async_browse_media.
    """
    thumbnail = None

    # The item dict's keys determine what kind of media it describes.
    if "app_id" in item:
        media_content_type = MediaType.APP
        media_content_id = item["app_id"]
        # Only apps get a proxied thumbnail URL.
        thumbnail = get_browse_image_url(media_content_type, media_content_id, None)
    elif "channel_number" in item:
        media_content_type = MediaType.CHANNEL
        media_content_id = item["channel_number"]
    else:
        # Generic container entries carry their own type and no id.
        media_content_type = item["type"]
        media_content_id = ""

    title = item["title"]
    # An empty media_content_id makes the item non-playable.
    can_play = media_content_type in PLAYABLE_MEDIA_TYPES and media_content_id
    can_expand = media_content_type in EXPANDABLE_MEDIA_TYPES

    return BrowseMedia(
        title=title,
        media_class=CONTENT_TYPE_MEDIA_CLASS[media_content_type],
        media_content_type=media_content_type,
        media_content_id=media_content_id,
        can_play=can_play,
        can_expand=can_expand,
        thumbnail=thumbnail,
    )
Format a Roku Channel name.
def format_channel_name(channel_number: str, channel_name: str | None = None) -> str: """Format a Roku Channel name.""" if channel_name is not None and channel_name != "": return f"{channel_name} ({channel_number})" return channel_number
Decorate Roku calls to handle Roku exceptions.
def roku_exception_handler(
    ignore_timeout: bool = False,
) -> Callable[[_FuncType[_RokuEntityT, _P]], _ReturnFuncType[_RokuEntityT, _P]]:
    """Decorate Roku calls to handle Roku exceptions.

    When ignore_timeout is True, connection timeouts are swallowed instead
    of being re-raised as HomeAssistantError; all other Roku errors are
    always converted to HomeAssistantError.
    """

    def decorator(
        func: _FuncType[_RokuEntityT, _P],
    ) -> _ReturnFuncType[_RokuEntityT, _P]:
        @wraps(func)
        async def wrapper(
            self: _RokuEntityT, *args: _P.args, **kwargs: _P.kwargs
        ) -> None:
            try:
                await func(self, *args, **kwargs)
            except RokuConnectionTimeoutError as error:
                # Callers opt in to ignoring timeouts via ignore_timeout.
                if not ignore_timeout:
                    raise HomeAssistantError(
                        "Timeout communicating with Roku API"
                    ) from error
            except RokuConnectionError as error:
                raise HomeAssistantError("Error communicating with Roku API") from error
            except RokuError as error:
                raise HomeAssistantError("Invalid response from Roku API") from error

        return wrapper

    return decorator
Create a discovery object.
def _async_get_roomba_discovery() -> RoombaDiscovery:
    """Create a discovery object."""
    finder = RoombaDiscovery()
    # Cap how many broadcast messages the discovery sends.
    finder.amount_of_broadcasted_messages = MAX_NUM_DEVICES_TO_DISCOVER
    return finder
Extract the blid from the hostname.
def _async_blid_from_hostname(hostname: str) -> str: """Extract the blid from the hostname.""" return hostname.split("-")[1].split(".")[0].upper()
Roomba report.
def roomba_reported_state(roomba: Roomba) -> dict[str, Any]:
    """Roomba report.

    Returns the "reported" sub-dict of the robot's master state,
    or an empty dict when either level is missing.
    """
    state = roomba.master_state.get("state", {})
    return state.get("reported", {})
Implement the websocket media browsing helper.
def browse_media(zone_id, roon_server, media_content_type=None, media_content_id=None):
    """Implement the websocket media browsing helper.

    Only the root/"library" content type is handled here; the actual
    tree is built by library_payload.
    """
    try:
        _LOGGER.debug("browse_media: %s: %s", media_content_type, media_content_id)
        if media_content_type in [None, "library"]:
            return library_payload(roon_server, zone_id, media_content_id)
    except UnknownMediaType as err:
        raise BrowseError(
            f"Media not found: {media_content_type} / {media_content_id}"
        ) from err
    # NOTE(review): any other content type falls through and returns None.
Create response payload for a single media item.
def item_payload(roon_server, item, list_image_id):
    """Create response payload for a single media item.

    The item's "hint" field decides its media class and whether it can
    be expanded; list_image_id is the fallback artwork for items
    without their own image_key.
    """
    title = item["title"]
    if (subtitle := item.get("subtitle")) is None:
        display_title = title
    else:
        display_title = f"{title} ({subtitle})"

    # Prefer the item's own artwork, fall back to the list's.
    image_id = item.get("image_key") or list_image_id
    image = None
    if image_id:
        image = roon_server.roonapi.get_image(image_id)

    media_content_id = item["item_key"]
    media_content_type = "library"

    hint = item.get("hint")
    if hint == "list":
        media_class = MediaClass.DIRECTORY
        can_expand = True
    elif hint == "action_list":
        media_class = MediaClass.PLAYLIST
        can_expand = False
    elif hint == "action":
        media_content_type = "track"
        media_class = MediaClass.TRACK
        can_expand = False
    else:
        # Roon API says to treat unknown as a list
        media_class = MediaClass.DIRECTORY
        can_expand = True
        _LOGGER.warning("Unknown hint %s - %s", title, hint)

    payload = {
        "title": display_title,
        "media_class": media_class,
        "media_content_id": media_content_id,
        "media_content_type": media_content_type,
        "can_play": True,
        "can_expand": can_expand,
        "thumbnail": image,
    }

    return BrowseMedia(**payload)
Create response payload for the library.
def library_payload(roon_server, zone_id, media_content_id):
    """Create response payload for the library.

    Browses one level of the Roon hierarchy for the given zone and
    returns it as a BrowseMedia tree node with its children attached.
    """
    opts = {
        "hierarchy": "browse",
        "zone_or_output_id": zone_id,
        "count": ITEM_LIMIT,
    }

    # Roon starts browsing for a zone where it left off - so start from the top unless otherwise specified
    if media_content_id is None or media_content_id == "Explore":
        opts["pop_all"] = True
        content_id = "Explore"
    else:
        opts["item_key"] = media_content_id
        content_id = media_content_id

    result_header = roon_server.roonapi.browse_browse(opts)
    _LOGGER.debug("Result header %s", result_header)

    header = result_header["list"]
    title = header.get("title")
    if (subtitle := header.get("subtitle")) is None:
        list_title = title
    else:
        list_title = f"{title} ({subtitle})"

    total_count = header["count"]

    library_image_id = header.get("image_key")

    library_info = BrowseMedia(
        title=list_title,
        media_content_id=content_id,
        media_content_type="library",
        media_class=MediaClass.DIRECTORY,
        can_play=False,
        can_expand=True,
        children=[],
    )

    result_detail = roon_server.roonapi.browse_load(opts)
    _LOGGER.debug("Result detail %s", result_detail)

    items = result_detail["items"]
    count = len(items)

    # Only ITEM_LIMIT entries are loaded even if the list is longer.
    if count < total_count:
        _LOGGER.debug(
            "Exceeded limit of %d, loaded %d/%d", ITEM_LIMIT, count, total_count
        )

    for item in items:
        if item.get("title") in EXCLUDE_ITEMS:
            continue
        entry = item_payload(roon_server, item, library_image_id)
        library_info.children.append(entry)

    return library_info
Set up the Route53 component.
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the Route53 component.

    Registers a periodic DNS record update and an on-demand
    "update_records" service using the configured AWS credentials.
    """
    domain = config[DOMAIN][CONF_DOMAIN]
    records = config[DOMAIN][CONF_RECORDS]
    zone = config[DOMAIN][CONF_ZONE]
    aws_access_key_id = config[DOMAIN][CONF_ACCESS_KEY_ID]
    aws_secret_access_key = config[DOMAIN][CONF_SECRET_ACCESS_KEY]
    ttl = config[DOMAIN][CONF_TTL]

    def update_records_interval(now):
        """Set up recurring update."""
        _update_route53(
            aws_access_key_id, aws_secret_access_key, zone, domain, records, ttl
        )

    def update_records_service(call: ServiceCall) -> None:
        """Set up service for manual trigger."""
        _update_route53(
            aws_access_key_id, aws_secret_access_key, zone, domain, records, ttl
        )

    # Periodic refresh plus a manually callable service.
    track_time_interval(hass, update_records_interval, INTERVAL)

    hass.services.register(DOMAIN, "update_records", update_records_service)
    return True
Kill any previously running raspistill process.
def kill_raspistill(*args):
    """Kill any previously running raspistill process."""
    # close_fds must stay False so the faster posix_spawn path can be used.
    proc = subprocess.Popen(
        ["killall", "raspistill"],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.STDOUT,
        close_fds=False,  # required for posix_spawn
    )
    # Context-manage the handle so stdio pipes are cleaned up and the
    # process is waited on.
    with proc:
        pass
Set up the Raspberry Camera.
def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Raspberry Camera.""" # We only want this platform to be set up via discovery. # prevent initializing by erroneous platform config section in yaml conf if discovery_info is None: return if shutil.which("raspistill") is None: _LOGGER.error("'raspistill' was not found") return hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, kill_raspistill) setup_config = hass.data[DOMAIN] file_path = setup_config[CONF_FILE_PATH] def delete_temp_file(*args): """Delete the temporary file to prevent saving multiple temp images. Only used when no path is defined """ os.remove(file_path) # If no file path is defined, use a temporary file if file_path is None: with NamedTemporaryFile(suffix=".jpg", delete=False) as temp_file: file_path = temp_file.name setup_config[CONF_FILE_PATH] = file_path hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, delete_temp_file) # Check whether the file path has been whitelisted elif not hass.config.is_allowed_path(file_path): _LOGGER.error("'%s' is not a whitelisted directory", file_path) return add_entities([RaspberryCamera(setup_config)])
Set up the rpi_camera integration.
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the rpi_camera integration.

    Stores the validated YAML options in hass.data for the camera
    platform, then loads that platform via discovery.
    """
    config_domain = config[DOMAIN]
    hass.data[DOMAIN] = {
        CONF_FILE_PATH: config_domain.get(CONF_FILE_PATH),
        CONF_HORIZONTAL_FLIP: config_domain.get(CONF_HORIZONTAL_FLIP),
        CONF_IMAGE_WIDTH: config_domain.get(CONF_IMAGE_WIDTH),
        CONF_IMAGE_HEIGHT: config_domain.get(CONF_IMAGE_HEIGHT),
        CONF_IMAGE_QUALITY: config_domain.get(CONF_IMAGE_QUALITY),
        CONF_IMAGE_ROTATION: config_domain.get(CONF_IMAGE_ROTATION),
        CONF_NAME: config_domain.get(CONF_NAME),
        CONF_OVERLAY_METADATA: config_domain.get(CONF_OVERLAY_METADATA),
        CONF_OVERLAY_TIMESTAMP: config_domain.get(CONF_OVERLAY_TIMESTAMP),
        CONF_TIMELAPSE: config_domain.get(CONF_TIMELAPSE),
        CONF_VERTICAL_FLIP: config_domain.get(CONF_VERTICAL_FLIP),
    }

    discovery.load_platform(hass, Platform.CAMERA, DOMAIN, {}, config)

    return True
Set up the RSS feed template component.
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the RSS feed template component.

    Registers one HTTP view per configured feed under
    /api/rss_template/<feeduri>.
    """
    for feeduri, feedconfig in config[DOMAIN].items():
        url = f"/api/rss_template/{feeduri}"

        requires_auth: bool = feedconfig["requires_api_password"]

        title: Template | None
        # Attach hass to every template so it can be rendered later.
        if (title := feedconfig.get("title")) is not None:
            title.hass = hass

        items: list[dict[str, Template]] = feedconfig["items"]
        for item in items:
            if "title" in item:
                item["title"].hass = hass
            if "description" in item:
                item["description"].hass = hass

        rss_view = RssView(url, requires_auth, title, items)
        hass.http.register_view(rss_view)

    return True
Set up the rtorrent sensors.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the rtorrent sensors.

    Creates one RTorrentSensor per monitored variable that has a
    matching sensor description.
    """
    url = config[CONF_URL]
    name = config[CONF_NAME]

    try:
        rtorrent = xmlrpc.client.ServerProxy(url)
    except (xmlrpc.client.ProtocolError, ConnectionRefusedError) as ex:
        # NOTE(review): ServerProxy connects lazily, so connection failures
        # may only surface on the first RPC call — confirm this guard fires.
        _LOGGER.error("Connection to rtorrent daemon failed")
        raise PlatformNotReady from ex
    monitored_variables = config[CONF_MONITORED_VARIABLES]
    entities = [
        RTorrentSensor(rtorrent, name, description)
        for description in SENSOR_TYPES
        if description.key in monitored_variables
    ]
    add_entities(entities)
Return a bytes/s measurement as a human readable string.
def format_speed(speed):
    """Convert a bytes/s measurement to kB/s, rounded for display.

    Uses two decimal places for very small rates (< 0.1 kB/s),
    one decimal place otherwise.
    """
    kilobytes = float(speed) / 1024
    precision = 2 if kilobytes < 0.1 else 1
    return round(kilobytes, precision)
Handle the websocket command.
def ws_get_settings(
    hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
    """Handle the websocket command.

    Replies with the configured STUN server ("" when unset).
    """
    domain_data = hass.data.get(DOMAIN, {})
    stun_server = domain_data.get(CONF_STUN_SERVER, "")
    connection.send_result(msg["id"], {CONF_STUN_SERVER: stun_server})
Add new tracker entities from the router.
def add_new_entities(coordinator, async_add_entities, tracked):
    """Add new tracker entities from the router.

    Skips MACs already in `tracked` and records the ones it adds.
    """
    clients = coordinator.data[KEY_SYS_CLIENTS]
    fresh = []
    for mac, device in clients.items():
        if mac in tracked:
            continue
        hostname = device[API_CLIENT_HOSTNAME]
        _LOGGER.debug("adding new device: [%s] %s", mac, hostname)
        fresh.append(RuckusUnleashedDevice(coordinator, mac, hostname))
        tracked.add(mac)

    async_add_entities(fresh)
Restore clients that are not a part of active clients list.
def restore_entities(
    registry: er.EntityRegistry,
    coordinator: RuckusUnleashedDataUpdateCoordinator,
    entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
    tracked: set[str],
) -> None:
    """Restore clients that are not a part of active clients list."""
    missing: list[RuckusUnleashedDevice] = []
    for entity in registry.entities.get_entries_for_config_entry_id(entry.entry_id):
        # Re-create only this integration's entities whose MAC is not
        # in the currently-connected client list.
        if (
            entity.platform == DOMAIN
            and entity.unique_id not in coordinator.data[KEY_SYS_CLIENTS]
        ):
            missing.append(
                RuckusUnleashedDevice(
                    coordinator, entity.unique_id, entity.original_name
                )
            )
            tracked.add(entity.unique_id)
    _LOGGER.debug("added %d missing devices", len(missing))
    async_add_entities(missing)
Set up the Russound RNET platform.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Russound RNET platform.

    Connects to the controller and adds one media-player entity per
    configured zone.
    """
    host = config.get(CONF_HOST)
    port = config.get(CONF_PORT)

    if host is None or port is None:
        _LOGGER.error("Invalid config. Expected %s and %s", CONF_HOST, CONF_PORT)
        return

    russ = russound.Russound(host, port)
    russ.connect()

    sources = [source["name"] for source in config[CONF_SOURCES]]

    if russ.is_connected():
        # Each zone becomes its own entity; True requests an initial update.
        for zone_id, extra in config[CONF_ZONES].items():
            add_entities(
                [RussoundRNETDevice(hass, russ, sources, zone_id, extra)], True
            )
    else:
        _LOGGER.error("Not connected to %s:%s", host, port)
Convert a device key to an entity key.
def _device_key_to_bluetooth_entity_key(
    device_key: DeviceKey,
) -> PassiveBluetoothEntityKey:
    """Convert a device key to an entity key."""
    key = device_key.key
    device_id = device_key.device_id
    return PassiveBluetoothEntityKey(key, device_id)
Convert a sensor update to a bluetooth data update.
def sensor_update_to_bluetooth_data_update(
    sensor_update: SensorUpdate,
) -> PassiveBluetoothDataUpdate:
    """Convert a sensor update to a bluetooth data update.

    Maps the library's device/entity keys onto Home Assistant's passive
    Bluetooth structures, dropping entities with no matching description.
    """
    return PassiveBluetoothDataUpdate(
        devices={
            device_id: sensor_device_info_to_hass_device_info(device_info)
            for device_id, device_info in sensor_update.devices.items()
        },
        # Only keep entities whose sensor key has a known description.
        entity_descriptions={
            _device_key_to_bluetooth_entity_key(device_key): SENSOR_DESCRIPTIONS[
                _to_sensor_key(description)
            ]
            for device_key, description in sensor_update.entity_descriptions.items()
            if _to_sensor_key(description) in SENSOR_DESCRIPTIONS
        },
        entity_data={
            _device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
            for device_key, sensor_values in sensor_update.entity_values.items()
        },
        entity_names={
            _device_key_to_bluetooth_entity_key(device_key): sensor_values.name
            for device_key, sensor_values in sensor_update.entity_values.items()
        },
    )
Connect scanner and start polling.
def async_connect_scanner(
    hass: HomeAssistant,
    entry: ConfigEntry,
    coordinator: RuuviGatewayUpdateCoordinator,
) -> tuple[RuuviGatewayScanner, CALLBACK_TYPE]:
    """Connect scanner and start polling.

    Returns the scanner and a callback that unregisters it and stops
    polling.
    """
    assert entry.unique_id is not None
    source = str(entry.unique_id)
    _LOGGER.debug(
        "%s [%s]: Connecting scanner",
        entry.title,
        source,
    )
    scanner = RuuviGatewayScanner(
        scanner_id=source,
        name=entry.title,
        coordinator=coordinator,
    )
    # Each of these returns its own teardown callback.
    unload_callbacks = [
        async_register_scanner(hass, scanner),
        scanner.async_setup(),
        scanner.start_polling(),
    ]

    @callback
    def _async_unload() -> None:
        # Run every collected teardown callback.
        for unloader in unload_callbacks:
            unloader()

    return (scanner, _async_unload)
Return a config schema with a default host.
def get_config_schema_with_default_host(host: str) -> vol.Schema:
    """Return a config schema with a default host."""
    host_field = {vol.Required(CONF_HOST, default=host): str}
    return CONFIG_SCHEMA.extend(host_field)
Get the entry ID related to a service call (by device ID).
def async_get_entry_id_for_service_call(hass: HomeAssistant, call: ServiceCall) -> str:
    """Get the entry ID related to a service call (by device ID).

    Raises ValueError when no config entry matches the call's API key.
    """
    api_key = call.data[ATTR_API_KEY]
    matching_ids = (
        entry.entry_id
        for entry in hass.config_entries.async_entries(DOMAIN)
        if entry.data[ATTR_API_KEY] == api_key
    )
    for entry_id in matching_ids:
        return entry_id
    raise ValueError(f"No api for API key: {api_key}")
Update device identifiers to new identifiers.
def update_device_identifiers(hass: HomeAssistant, entry: ConfigEntry):
    """Update device identifiers to new identifiers.

    Migrates the legacy (DOMAIN, DOMAIN) device identifier to one based
    on the config entry id, when the device belongs to this entry.
    """
    device_registry = async_get(hass)
    device_entry = device_registry.async_get_device(identifiers={(DOMAIN, DOMAIN)})
    if device_entry and entry.entry_id in device_entry.config_entries:
        new_identifiers = {(DOMAIN, entry.entry_id)}
        _LOGGER.debug(
            "Updating device id <%s> with new identifiers <%s>",
            device_entry.id,
            new_identifiers,
        )
        device_registry.async_update_device(
            device_entry.id, new_identifiers=new_identifiers
        )
Add a listener that fires repetitively and increases the interval when failed.
def async_track_time_interval_backoff(
    hass: HomeAssistant, action: Callable[[], Coroutine[Any, Any, bool]]
) -> CALLBACK_TYPE:
    """Add a listener that fires repetitively and increases the interval when failed.

    `action` returns True on success (interval resets to MIN_INTERVAL)
    and False on failure (interval doubles, capped at MAX_INTERVAL).
    Returns a callback that cancels the pending invocation.
    """
    remove = None
    interval = MIN_INTERVAL

    async def interval_listener(now: datetime | None = None) -> None:
        """Handle elapsed interval with backoff."""
        nonlocal interval, remove
        try:
            if await action():
                # Success: fall back to the fastest cadence.
                interval = MIN_INTERVAL
            else:
                # Failure: exponential backoff, capped.
                interval = min(interval * 2, MAX_INTERVAL)
        finally:
            # Always reschedule, even if action() raised.
            remove = async_call_later(hass, interval, interval_listener)

    hass.async_create_task(interval_listener())

    def remove_listener() -> None:
        """Remove interval listener."""
        # NOTE(review): if called while interval_listener is mid-flight,
        # the finally block reschedules afterwards — confirm this race is
        # acceptable to callers.
        if remove:
            remove()

    return remove_listener
Extract the mac address from the device info.
def mac_from_device_info(info: dict[str, Any]) -> str | None:
    """Extract the mac address from the device info.

    Returns None when no wifiMac entry is present.
    """
    device_section = info.get("device", {})
    wifi_mac = device_section.get("wifiMac")
    if not wifi_mac:
        return None
    return format_mac(wifi_mac)
H and J models need pairing with PIN.
def model_requires_encryption(model: str | None) -> bool: """H and J models need pairing with PIN.""" return model is not None and len(model) > 4 and model[4] in ("H", "J")
Return True if the config entry information is complete. If we do not have an ssdp location we consider it complete as some TVs will not support SSDP/UPNP
def _entry_is_complete(
    entry: ConfigEntry,
    ssdp_rendering_control_location: str | None,
    ssdp_main_tv_agent_location: str | None,
) -> bool:
    """Return True if the config entry information is complete.

    If we do not have an ssdp location we consider it complete
    as some TVs will not support SSDP/UPNP
    """
    return bool(
        entry.unique_id
        and entry.data.get(CONF_MAC)
        # Each SSDP location is only required when it was discovered.
        and (
            not ssdp_rendering_control_location
            or entry.data.get(CONF_SSDP_RENDERING_CONTROL_LOCATION)
        )
        and (
            not ssdp_main_tv_agent_location
            or entry.data.get(CONF_SSDP_MAIN_TV_AGENT_LOCATION)
        )
    )
Check if two macs are the same but formatted incorrectly.
def _mac_is_same_with_incorrect_formatting(
    current_unformatted_mac: str, formatted_mac: str
) -> bool:
    """Check if two macs are the same but formatted incorrectly."""
    normalized = format_mac(current_unformatted_mac)
    if normalized != formatted_mac:
        return False
    # Same MAC — report True only when the stored form needs rewriting.
    return current_unformatted_mac != normalized
Get Device Entry from Device Registry by device ID. Raises ValueError if device ID is invalid.
def async_get_device_entry_by_device_id(
    hass: HomeAssistant, device_id: str
) -> DeviceEntry:
    """Get Device Entry from Device Registry by device ID.

    Raises ValueError if device ID is invalid.
    """
    registry = dr.async_get(hass)
    device = registry.async_get(device_id)
    if device is None:
        raise ValueError(f"Device {device_id} is not a valid {DOMAIN} device.")
    return device
Get device ID from an entity ID. Raises ValueError if entity or device ID is invalid.
def async_get_device_id_from_entity_id(hass: HomeAssistant, entity_id: str) -> str:
    """Get device ID from an entity ID.

    Raises ValueError if entity or device ID is invalid.
    """
    entity_entry = er.async_get(hass).async_get(entity_id)
    is_valid = (
        entity_entry is not None
        and entity_entry.device_id is not None
        and entity_entry.platform == DOMAIN
    )
    if not is_valid:
        raise ValueError(f"Entity {entity_id} is not a valid {DOMAIN} entity.")
    return entity_entry.device_id
Get SamsungTVBridge from Device Registry by device entry. Raises ValueError if client is not found.
def async_get_client_by_device_entry(
    hass: HomeAssistant, device: DeviceEntry
) -> SamsungTVBridge:
    """Get SamsungTVBridge from Device Registry by device entry.

    Raises ValueError if client is not found.
    """
    domain_data: dict[str, SamsungTVBridge] = hass.data[DOMAIN]
    # A device may belong to several entries; the first with a bridge wins.
    for config_entry_id in device.config_entries:
        bridge = domain_data.get(config_entry_id)
        if bridge:
            return bridge
    raise ValueError(
        f"Device {device.id} is not from an existing {DOMAIN} config entry"
    )
Return trigger platform.
def _get_trigger_platform(config: ConfigType) -> TriggerProtocol:
    """Return trigger platform.

    Raises ValueError for unknown "<domain>.<trigger>" platform strings.
    """
    parts = config[CONF_PLATFORM].split(".", maxsplit=1)
    if len(parts) < 2 or parts[1] not in TRIGGERS:
        raise ValueError(f"Unknown Samsung TV trigger platform {config[CONF_PLATFORM]}")
    return cast(TriggerProtocol, TRIGGERS[parts[1]])
Get device bridge.
def _async_get_device_bridge(
    hass: HomeAssistant, data: dict[str, Any]
) -> SamsungTVBridge:
    """Get device bridge.

    Builds a bridge from the entry data's method, host and port.
    """
    return SamsungTVBridge.get_bridge(
        hass,
        data[CONF_METHOD],
        data[CONF_HOST],
        data[CONF_PORT],
        data,
    )
Return data for a turn on trigger.
def async_get_turn_on_trigger(device_id: str) -> dict[str, str]:
    """Return data for a turn on trigger."""
    trigger = {
        CONF_PLATFORM: "device",
        CONF_DEVICE_ID: device_id,
        CONF_DOMAIN: DOMAIN,
        CONF_TYPE: PLATFORM_TYPE,
    }
    return trigger
Check if alarm code must be configured.
def is_alarm_code_necessary(value):
    """Check if alarm code must be configured.

    Switchable outputs cannot be controlled without a device code.
    """
    has_outputs = bool(value.get(CONF_SWITCHABLE_OUTPUTS))
    if has_outputs and CONF_DEVICE_CODE not in value:
        raise vol.Invalid("You need to specify alarm code to use switchable_outputs")

    return value
Validate platform in config for homeassistant domain.
def _hass_domain_validator(config: dict[str, Any]) -> dict[str, Any]:
    """Validate platform in config for homeassistant domain.

    Wraps a bare state mapping in the homeassistant platform shape.
    """
    if CONF_PLATFORM in config:
        return config
    return {CONF_PLATFORM: HA_DOMAIN, STATES: config}
Validate it is a valid platform.
def _platform_validator(config: dict[str, Any]) -> dict[str, Any]:
    """Validate it is a valid platform.

    Imports the platform's scene module and, when present, applies its
    PLATFORM_SCHEMA to the config.
    """
    platform_name = config[CONF_PLATFORM]
    module_path = f"homeassistant.components.{platform_name}.scene"
    try:
        platform = importlib.import_module(module_path)
    except ImportError:
        raise vol.Invalid("Invalid platform specified") from None

    if not hasattr(platform, "PLATFORM_SCHEMA"):
        return config
    return platform.PLATFORM_SCHEMA(config)
Validate the schedule of time ranges. Ensure they have no overlap and the end time is greater than the start time.
def valid_schedule(schedule: list[dict[str, str]]) -> list[dict[str, str]]:
    """Validate the schedule of time ranges.

    Ensure they have no overlap and the end time is greater than the start time.
    """
    # An empty schedule is trivially valid.
    if not schedule:
        return schedule

    # Order the ranges by start time so any overlap is between neighbors.
    ordered = sorted(schedule, key=lambda time_range: time_range[CONF_FROM])

    previous_to = None
    for time_range in ordered:
        start = time_range[CONF_FROM]
        end = time_range[CONF_TO]
        if start >= end:
            raise vol.Invalid(
                f"Invalid time range, from {start} is after {end}"
            )
        # Each range must begin at or after the previous range's end.
        if previous_to is not None and previous_to > start:
            raise vol.Invalid("Overlapping times found in schedule")
        previous_to = end

    return ordered
Convert 24:00 and 24:00:00 to time.max.
def deserialize_to_time(value: Any) -> Any:
    """Convert 24:00 and 24:00:00 to time.max.

    Any other value is delegated to cv.time for normal validation.
    """
    if not isinstance(value, str):
        return cv.time(value)

    parts = value.split(":")
    if len(parts) < 2:
        return cv.time(value)

    try:
        hour = int(parts[0])
        minute = int(parts[1])
        # Fix: previously "24:00:30" was silently mapped to time.max —
        # only an explicit zero (or absent) seconds field qualifies.
        second = int(parts[2]) if len(parts) > 2 else 0
    except ValueError:
        # Non-numeric fields: let the standard validator raise its error.
        return cv.time(value)

    if hour == 24 and minute == 0 and second == 0:
        return time.max

    return cv.time(value)
Convert time.max to 24:00:00.
def serialize_to_time(value: Any) -> Any:
    """Convert time.max to 24:00:00."""
    if value != time.max:
        return vol.Coerce(str)(value)
    return "24:00:00"
Authenticate with the Schlage API.
def _authenticate(username: str, password: str) -> tuple[str | None, dict[str, str]]:
    """Authenticate with the Schlage API.

    Returns (user_id, errors): user_id is None on failure, and errors
    maps "base" to a translation key describing what went wrong.
    """
    errors: dict[str, str] = {}
    user_id = None
    try:
        auth = pyschlage.Auth(username, password)
        auth.authenticate()
    except NotAuthorizedError:
        errors["base"] = "invalid_auth"
    except Exception:  # pylint: disable=broad-except
        LOGGER.exception("Unknown error")
        errors["base"] = "unknown"
    else:
        # The user_id property will make a blocking call if it's not already
        # cached. To avoid blocking the event loop, we read it here.
        user_id = auth.user_id
    return user_id, errors
Set up the Schluter component.
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the Schluter component.

    Authenticates against the Schluter API and, on success, stores the
    session and loads the climate platform.
    """
    _LOGGER.debug("Starting setup of schluter")

    conf = config[DOMAIN]
    api_http_session = Session()
    api = Api(timeout=API_TIMEOUT, http_session=api_http_session)

    authenticator = Authenticator(
        api,
        conf.get(CONF_USERNAME),
        conf.get(CONF_PASSWORD),
        # Cached session id avoids re-authenticating on every start.
        session_id_cache_file=hass.config.path(SCHLUTER_CONFIG_FILE),
    )

    authentication = None
    try:
        authentication = authenticator.authenticate()
    except RequestException as ex:
        _LOGGER.error("Unable to connect to Schluter service: %s", ex)
        return False

    state = authentication.state

    if state == AuthenticationState.AUTHENTICATED:
        hass.data[DOMAIN] = {
            DATA_SCHLUTER_API: api,
            DATA_SCHLUTER_SESSION: authentication.session_id,
        }
        discovery.load_platform(hass, Platform.CLIMATE, DOMAIN, {}, config)
        return True
    if state == AuthenticationState.BAD_PASSWORD:
        _LOGGER.error("Invalid password provided")
        return False
    if state == AuthenticationState.BAD_EMAIL:
        _LOGGER.error("Invalid email provided")
        return False
    _LOGGER.error("Unknown set up error: %s", state)
    return False
Short version of the mac as seen in the app.
def short_mac(mac):
    """Short version of the mac as seen in the app."""
    # Keep only the last three octets, dash-joined and uppercased.
    tail_octets = mac.split(":")[3:]
    return "-".join(tail_octets).upper()
Derive the gateway name from the mac.
def name_for_mac(mac):
    """Derive the gateway name from the mac."""
    # Same shortening as short_mac: last three octets, dash-joined, uppercased.
    suffix = "-".join(mac.split(":")[3:]).upper()
    return f"Pentair: {suffix}"
Set up services for the ScreenLogic integration.
def async_load_screenlogic_services(hass: HomeAssistant):
    """Set up services for the ScreenLogic integration."""

    async def extract_screenlogic_config_entry_ids(service_call: ServiceCall):
        # Resolve config entry ids from the service call's target selector.
        if not (
            screenlogic_entry_ids := await async_extract_config_entry_ids(
                hass, service_call
            )
        ):
            raise ServiceValidationError(
                f"Failed to call service '{service_call.service}'. Config entry for "
                "target not found"
            )
        return screenlogic_entry_ids

    async def get_coordinators(
        service_call: ServiceCall,
    ) -> list[ScreenlogicDataUpdateCoordinator]:
        # Prefer an explicit config entry id; otherwise fall back to the
        # (deprecated) entity/device/area target and raise a repair issue.
        entry_ids: set[str]
        if entry_id := service_call.data.get(ATTR_CONFIG_ENTRY):
            entry_ids = {entry_id}
        else:
            ir.async_create_issue(
                hass,
                DOMAIN,
                "service_target_deprecation",
                breaks_in_ha_version="2024.8.0",
                is_fixable=True,
                is_persistent=True,
                severity=ir.IssueSeverity.WARNING,
                translation_key="service_target_deprecation",
            )
            entry_ids = await extract_screenlogic_config_entry_ids(service_call)
        coordinators: list[ScreenlogicDataUpdateCoordinator] = []
        for entry_id in entry_ids:
            config_entry: ConfigEntry | None = hass.config_entries.async_get_entry(
                entry_id
            )
            # Each target entry must exist, belong to this domain, and be loaded.
            if not config_entry:
                raise ServiceValidationError(
                    f"Failed to call service '{service_call.service}'. Config entry "
                    f"'{entry_id}' not found"
                )
            if not config_entry.domain == DOMAIN:
                raise ServiceValidationError(
                    f"Failed to call service '{service_call.service}'. Config entry "
                    f"'{entry_id}' is not a {DOMAIN} config"
                )
            if not config_entry.state == ConfigEntryState.LOADED:
                raise ServiceValidationError(
                    f"Failed to call service '{service_call.service}'. Config entry "
                    f"'{entry_id}' not loaded"
                )
            coordinators.append(hass.data[DOMAIN][entry_id])

        return coordinators

    async def async_set_color_mode(service_call: ServiceCall) -> None:
        # Translate the friendly color mode name to the gateway's number.
        color_num = SUPPORTED_COLOR_MODES[service_call.data[ATTR_COLOR_MODE]]
        coordinator: ScreenlogicDataUpdateCoordinator
        for coordinator in await get_coordinators(service_call):
            _LOGGER.debug(
                "Service %s called on %s with mode %s",
                SERVICE_SET_COLOR_MODE,
                coordinator.gateway.name,
                color_num,
            )
            try:
                await coordinator.gateway.async_set_color_lights(color_num)
                # Debounced refresh to catch any secondary changes in the device
                await coordinator.async_request_refresh()
            except ScreenLogicError as error:
                raise HomeAssistantError(error) from error

    async def async_set_super_chlor(
        service_call: ServiceCall,
        is_on: bool,
        runtime: int | None = None,
    ) -> None:
        # Shared implementation for starting and stopping super chlorination.
        coordinator: ScreenlogicDataUpdateCoordinator
        for coordinator in await get_coordinators(service_call):
            if EQUIPMENT_FLAG.CHLORINATOR not in coordinator.gateway.equipment_flags:
                raise ServiceValidationError(
                    f"Equipment configuration for {coordinator.gateway.name} does not"
                    f" support {service_call.service}"
                )
            rt_log = f" with runtime {runtime}" if runtime else ""
            _LOGGER.debug(
                "Service %s called on %s%s",
                service_call.service,
                coordinator.gateway.name,
                rt_log,
            )
            try:
                await coordinator.gateway.async_set_scg_config(
                    super_chlor_timer=runtime, super_chlorinate=is_on
                )
                # Debounced refresh to catch any secondary changes in the device
                await coordinator.async_request_refresh()
            except ScreenLogicError as error:
                raise HomeAssistantError(error) from error

    async def async_start_super_chlor(service_call: ServiceCall) -> None:
        runtime = service_call.data[ATTR_RUNTIME]
        await async_set_super_chlor(service_call, True, runtime)

    async def async_stop_super_chlor(service_call: ServiceCall) -> None:
        await async_set_super_chlor(service_call, False)

    hass.services.async_register(
        DOMAIN, SERVICE_SET_COLOR_MODE, async_set_color_mode, SET_COLOR_MODE_SCHEMA
    )

    hass.services.async_register(
        DOMAIN,
        SERVICE_START_SUPER_CHLORINATION,
        async_start_super_chlor,
        TURN_ON_SUPER_CHLOR_SCHEMA,
    )

    hass.services.async_register(
        DOMAIN,
        SERVICE_STOP_SUPER_CHLORINATION,
        async_stop_super_chlor,
        BASE_SERVICE_SCHEMA,
    )
Generate new unique_id for a screenlogic entity from specified parameters.
def generate_unique_id(*args: str | int | None) -> str:
    """Generate new unique_id for a screenlogic entity from specified parameters."""
    _LOGGER.debug("gen_uid called with %s", args)
    if len(args) != 3:
        # Legacy shape: only the second element identifies the entity.
        return f"{args[1]}"
    device, index, value = args
    if value not in SHARED_VALUES:
        return f"{value}"
    # Shared values need the device (and numeric index, when present)
    # to stay unique.
    has_numeric_index = index is not None and (
        isinstance(index, int) or index.isdigit()
    )
    if has_numeric_index:
        return f"{device}_{index}_{value}"
    return f"{device}_{value}"
Return equivalent Home Assistant unit of measurement if exists.
def get_ha_unit(sl_unit) -> str:
    """Map a ScreenLogic unit onto its Home Assistant equivalent, if one exists."""
    ha_unit = SL_UNIT_TO_HA_UNIT.get(sl_unit)
    if ha_unit is None:
        # No mapping defined: pass the ScreenLogic unit through unchanged.
        return sl_unit
    return ha_unit
Remove excluded entity if it exists.
def cleanup_excluded_entity(
    coordinator: ScreenlogicDataUpdateCoordinator,
    platform_domain: str,
    data_path: ScreenLogicDataPath,
) -> None:
    """Remove a previously created entity that is now excluded by inclusion rules."""
    assert coordinator.config_entry
    registry = er.async_get(coordinator.hass)
    unique_id = (
        f"{coordinator.config_entry.unique_id}_{generate_unique_id(*data_path)}"
    )
    entity_id = registry.async_get_entity_id(platform_domain, SL_DOMAIN, unique_id)
    if entity_id is None:
        # Nothing registered under this unique_id; nothing to clean up.
        return
    _LOGGER.debug("Removing existing entity '%s' per data inclusion rule", entity_id)
    registry.async_remove(entity_id)
Return True if any script references the blueprint.
def _blueprint_in_use(hass: HomeAssistant, blueprint_path: str) -> bool:
    """Return True if at least one script is based on the given blueprint."""
    # Imported lazily to avoid a circular import with the package __init__.
    from . import scripts_with_blueprint  # pylint: disable=import-outside-toplevel

    return bool(scripts_with_blueprint(hass, blueprint_path))
Get script blueprints.
def async_get_blueprints(hass: HomeAssistant) -> DomainBlueprints:
    """Return the DomainBlueprints manager for script blueprints."""
    # _blueprint_in_use guards deletion; _reload_blueprint_scripts reloads
    # dependent scripts after a blueprint changes.
    return DomainBlueprints(
        hass, DOMAIN, LOGGER, _blueprint_in_use, _reload_blueprint_scripts
    )
Describe logbook events.
def async_describe_events(hass, async_describe_event):
    """Register the logbook describer for script-started events."""

    @callback
    def async_describe_logbook_event(event):
        """Produce a logbook entry dict for one EVENT_SCRIPT_STARTED event."""
        event_data = event.data
        entry = {
            LOGBOOK_ENTRY_NAME: event_data.get(ATTR_NAME),
            LOGBOOK_ENTRY_MESSAGE: "started",
            LOGBOOK_ENTRY_ENTITY_ID: event_data.get(ATTR_ENTITY_ID),
            LOGBOOK_ENTRY_CONTEXT_ID: event.context_id,
        }
        return entry

    async_describe_event(DOMAIN, EVENT_SCRIPT_STARTED, async_describe_logbook_event)
Trace execution of a script.
def trace_script(
    hass: HomeAssistant,
    item_id: str,
    config: dict[str, Any],
    blueprint_inputs: dict[str, Any],
    context: Context,
    trace_config: dict[str, Any],
) -> Iterator[ScriptTrace]:
    """Trace execution of a script.

    Context-manager generator: creates a ScriptTrace, stores it via
    async_store_trace, and yields it to the caller.  An exception raised in
    the ``with`` body is recorded on the trace (when item_id is truthy) and
    re-raised; the trace is always marked finished on exit.
    """
    trace = ScriptTrace(item_id, config, blueprint_inputs, context)
    async_store_trace(hass, trace, trace_config[CONF_STORED_TRACES])

    try:
        yield trace
    except Exception as ex:
        # Only attribute the error to a real script (item_id may be empty).
        if item_id:
            trace.set_error(ex)
        raise
    finally:
        if item_id:
            trace.finished()
Return if the script is on based on the statemachine.
def is_on(hass, entity_id):
    """Return if the script is on based on the statemachine."""
    # Purely a state-machine read; does not touch the script entity itself.
    return hass.states.is_state(entity_id, STATE_ON)
Return all scripts that reference the x.
def _scripts_with_x(
    hass: HomeAssistant, referenced_id: str, property_name: str
) -> list[str]:
    """Return entity_ids of scripts whose named property contains referenced_id."""
    if DOMAIN not in hass.data:
        # Script integration not set up yet.
        return []

    component: EntityComponent[BaseScriptEntity] = hass.data[DOMAIN]

    matches: list[str] = []
    for script_entity in component.entities:
        if referenced_id in getattr(script_entity, property_name):
            matches.append(script_entity.entity_id)
    return matches
Return all x in a script.
def _x_in_script(hass: HomeAssistant, entity_id: str, property_name: str) -> list[str]:
    """Return the named referenced-ids property of one script as a list."""
    if DOMAIN not in hass.data:
        # Script integration not set up yet.
        return []

    component: EntityComponent[BaseScriptEntity] = hass.data[DOMAIN]

    script_entity = component.get_entity(entity_id)
    if script_entity is None:
        return []

    return list(getattr(script_entity, property_name))
Return all scripts that reference the entity.
def scripts_with_entity(hass: HomeAssistant, entity_id: str) -> list[str]:
    """Return entity_ids of all scripts that reference the given entity."""
    return _scripts_with_x(hass, entity_id, "referenced_entities")
Return all entities in script.
def entities_in_script(hass: HomeAssistant, entity_id: str) -> list[str]:
    """Return all entity_ids referenced by the given script."""
    return _x_in_script(hass, entity_id, "referenced_entities")
Return all scripts that reference the device.
def scripts_with_device(hass: HomeAssistant, device_id: str) -> list[str]:
    """Return entity_ids of all scripts that reference the given device."""
    return _scripts_with_x(hass, device_id, "referenced_devices")
Return all devices in script.
def devices_in_script(hass: HomeAssistant, entity_id: str) -> list[str]:
    """Return all device_ids referenced by the given script."""
    return _x_in_script(hass, entity_id, "referenced_devices")
Return all scripts that reference the area.
def scripts_with_area(hass: HomeAssistant, area_id: str) -> list[str]:
    """Return entity_ids of all scripts that reference the given area."""
    return _scripts_with_x(hass, area_id, "referenced_areas")
Return all areas in a script.
def areas_in_script(hass: HomeAssistant, entity_id: str) -> list[str]:
    """Return all area_ids referenced by the given script."""
    return _x_in_script(hass, entity_id, "referenced_areas")
Return all scripts that reference the floor.
def scripts_with_floor(hass: HomeAssistant, floor_id: str) -> list[str]:
    """Return entity_ids of all scripts that reference the given floor."""
    return _scripts_with_x(hass, floor_id, "referenced_floors")
Return all floors in a script.
def floors_in_script(hass: HomeAssistant, entity_id: str) -> list[str]:
    """Return all floor_ids referenced by the given script."""
    return _x_in_script(hass, entity_id, "referenced_floors")
Return all scripts that reference the label.
def scripts_with_label(hass: HomeAssistant, label_id: str) -> list[str]:
    """Return entity_ids of all scripts that reference the given label."""
    return _scripts_with_x(hass, label_id, "referenced_labels")
Return all labels in a script.
def labels_in_script(hass: HomeAssistant, entity_id: str) -> list[str]:
    """Return all label_ids referenced by the given script."""
    return _x_in_script(hass, entity_id, "referenced_labels")
Return all scripts that reference the blueprint.
def scripts_with_blueprint(hass: HomeAssistant, blueprint_path: str) -> list[str]:
    """Return entity_ids of all scripts based on the given blueprint."""
    if DOMAIN not in hass.data:
        # Script integration not set up yet.
        return []

    component: EntityComponent[BaseScriptEntity] = hass.data[DOMAIN]

    matches: list[str] = []
    for script_entity in component.entities:
        if script_entity.referenced_blueprint == blueprint_path:
            matches.append(script_entity.entity_id)
    return matches
Return the blueprint the script is based on or None.
def blueprint_in_script(hass: HomeAssistant, entity_id: str) -> str | None:
    """Return the blueprint path the script is based on, or None."""
    if DOMAIN not in hass.data:
        return None

    component: EntityComponent[BaseScriptEntity] = hass.data[DOMAIN]

    script_entity = component.get_entity(entity_id)
    if script_entity is None:
        return None

    return script_entity.referenced_blueprint
Get script config.
def websocket_config(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Send the raw config of a single script entity over the websocket."""
    component: EntityComponent[BaseScriptEntity] = hass.data[DOMAIN]

    if (script := component.get_entity(msg["entity_id"])) is None:
        connection.send_error(
            msg["id"], websocket_api.const.ERR_NOT_FOUND, "Entity not found"
        )
        return

    connection.send_result(msg["id"], {"config": script.raw_config})
Set up the SCSGate cover.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the SCSGate cover."""
    logger = logging.getLogger(__name__)
    scsgate = hass.data[DOMAIN]
    devices = config.get(CONF_DEVICES)

    covers = []
    if devices:
        for entity_info in devices.values():
            scs_id = entity_info[CONF_SCS_ID]
            if scs_id in scsgate.devices:
                # Already known to the gateway; skip duplicates.
                continue

            name = entity_info[CONF_NAME]
            logger.info("Adding %s scsgate.cover", name)

            cover = SCSGateCover(
                name=name, scs_id=scs_id, logger=logger, scsgate=scsgate
            )
            # Covers register with the gateway one at a time.
            scsgate.add_device(cover)
            covers.append(cover)

    add_entities(covers)
Set up the SCSGate switches.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the SCSGate lights."""
    logger = logging.getLogger(__name__)
    scsgate = hass.data[DOMAIN]
    devices = config.get(CONF_DEVICES)

    lights = []
    if devices:
        for entity_info in devices.values():
            scs_id = entity_info[CONF_SCS_ID]
            if scs_id in scsgate.devices:
                # Already known to the gateway; skip duplicates.
                continue

            name = entity_info[CONF_NAME]
            logger.info("Adding %s scsgate.light", name)

            lights.append(
                SCSGateLight(name=name, scs_id=scs_id, logger=logger, scsgate=scsgate)
            )

    add_entities(lights)
    scsgate.add_devices_to_register(lights)
Set up the SCSGate switches.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the SCSGate switches."""
    logger = logging.getLogger(__name__)
    scsgate = hass.data[DOMAIN]

    # Traditional switches become Home Assistant entities; scenario switches
    # are only registered with the gateway (see _setup_scenario_switches).
    _setup_traditional_switches(
        logger=logger,
        config=config,
        scsgate=scsgate,
        add_entities_callback=add_entities,
    )
    _setup_scenario_switches(logger=logger, config=config, scsgate=scsgate, hass=hass)
Add traditional SCSGate switches.
def _setup_traditional_switches(logger, config, scsgate, add_entities_callback):
    """Add traditional SCSGate switches."""
    traditional = config.get(CONF_TRADITIONAL)

    switches = []
    if traditional:
        for entity_info in traditional.values():
            scs_id = entity_info[CONF_SCS_ID]
            if scs_id in scsgate.devices:
                # Already known to the gateway; skip duplicates.
                continue

            name = entity_info[CONF_NAME]
            logger.info("Adding %s scsgate.traditional_switch", name)

            switches.append(
                SCSGateSwitch(name=name, scs_id=scs_id, logger=logger, scsgate=scsgate)
            )

    add_entities_callback(switches)
    scsgate.add_devices_to_register(switches)
Add only SCSGate scenario switches.
def _setup_scenario_switches(logger, config, scsgate, hass):
    """Add only SCSGate scenario switches."""
    scenario = config.get(CONF_SCENARIO)
    if not scenario:
        return

    for entity_info in scenario.values():
        scs_id = entity_info[CONF_SCS_ID]
        if scs_id in scsgate.devices:
            # Already known to the gateway; skip duplicates.
            continue

        name = entity_info[CONF_NAME]
        logger.info("Adding %s scsgate.scenario_switch", name)

        # Scenario switches are registered with the gateway only; they are
        # not added as Home Assistant entities.
        scsgate.add_device(
            SCSGateScenarioSwitch(name=name, scs_id=scs_id, logger=logger, hass=hass)
        )
Set up the SCSGate component.
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the SCSGate component.

    Opens the configured serial device, starts the gateway monitor thread,
    and registers a shutdown hook that stops the thread on Home Assistant
    stop.  Returns False when the gateway cannot be started.
    """
    device = config[DOMAIN][CONF_DEVICE]

    # Fix: removed the dead `scsgate = None` pre-initialization — the name
    # was either rebound immediately below or never read after returning.
    try:
        scsgate = SCSGate(device=device, logger=_LOGGER)
        scsgate.start()
    # Broad except is deliberate: any failure to open/start the serial
    # gateway must abort setup instead of crashing Home Assistant.
    except Exception as exception:  # pylint: disable=broad-except
        _LOGGER.error("Cannot setup SCSGate component: %s", exception)
        return False

    def stop_monitor(event):
        """Stop the SCSGate monitor thread on Home Assistant shutdown."""
        _LOGGER.info("Stopping SCSGate monitor thread")
        scsgate.stop()

    hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_monitor)
    hass.data[DOMAIN] = scsgate

    return True
Handle search.
def websocket_search_related(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Handle a search/related websocket command."""
    searcher = Searcher(hass, get_entity_sources(hass))
    results = searcher.async_search(msg["item_type"], msg["item_id"])
    connection.send_result(msg["id"], results)
Calculate the current season.
def get_season(
    current_date: date, hemisphere: str, season_tracking_type: str
) -> str | None:
    """Calculate the current season.

    Returns None on the equator (no seasons).  Otherwise computes the four
    season start boundaries for the current year — astronomically via ephem
    equinox/solstice lookups, or meteorologically as fixed month starts —
    and maps current_date into one of them, swapping for the southern
    hemisphere.
    """
    if hemisphere == "equator":
        return None

    if season_tracking_type == TYPE_ASTRONOMICAL:
        # Chain the ephem queries: autumn follows the spring equinox and
        # winter follows the summer solstice, keeping all four boundaries
        # within the same year.
        spring_start = ephem.next_equinox(str(current_date.year)).datetime()
        summer_start = ephem.next_solstice(str(current_date.year)).datetime()
        autumn_start = ephem.next_equinox(spring_start).datetime()
        winter_start = ephem.next_solstice(summer_start).datetime()
    else:
        # Meteorological seasons start on fixed dates (Mar/Jun/Sep/Dec 1).
        spring_start = datetime(2017, 3, 1).replace(year=current_date.year)
        summer_start = spring_start.replace(month=6)
        autumn_start = spring_start.replace(month=9)
        winter_start = spring_start.replace(month=12)

    # Default covers the wrap-around: before spring_start or after winter_start.
    season = STATE_WINTER
    if spring_start <= current_date < summer_start:
        season = STATE_SPRING
    elif summer_start <= current_date < autumn_start:
        season = STATE_SUMMER
    elif autumn_start <= current_date < winter_start:
        season = STATE_AUTUMN

    # If user is located in the southern hemisphere swap the season
    if hemisphere == NORTHERN:
        return season
    return HEMISPHERE_SEASON_SWAP.get(season)
Create a function to test a device condition.
def async_condition_from_config(
    hass: HomeAssistant, config: ConfigType
) -> condition.ConditionCheckerType:
    """Create a function to test a device condition."""
    registry = er.async_get(hass)
    entity_id = er.async_resolve_entity_id(registry, config[CONF_ENTITY_ID])

    @callback
    def test_is_state(hass: HomeAssistant, variables: TemplateVarsType) -> bool:
        """Return True when the entity currently matches the configured state."""
        required_state = config[CONF_OPTION]
        for_period = config.get(CONF_FOR)
        return condition.state(hass, entity_id, required_state, for_period)

    return test_is_state
Test if state significantly changed.
def async_check_significant_change(
    hass: HomeAssistant,
    old_state: str,
    old_attrs: dict,
    new_state: str,
    new_attrs: dict,
    **kwargs: Any,
) -> bool:
    """Return True when the state value itself changed.

    Attribute changes are never considered significant for this domain.
    """
    if old_state == new_state:
        return False
    return True
Get the SendGrid notification service.
def get_service(
    hass: HomeAssistant,
    config: ConfigType,
    discovery_info: DiscoveryInfoType | None = None,
) -> SendgridNotificationService:
    """Get the SendGrid notification service."""
    # Simple factory: the service reads everything it needs from the
    # validated platform config.
    return SendgridNotificationService(config)
Convert sense icon to mdi icon.
def sense_to_mdi(sense_icon):
    """Translate a Sense icon name into an mdi: icon identifier."""
    # Unknown icons fall back to the generic power plug.
    mdi_name = MDI_ICONS.get(sense_icon, "power-plug")
    return f"mdi:{mdi_name}"
Convert sense icon to mdi icon.
def sense_to_mdi(sense_icon):
    """Translate a Sense icon name into an mdi: icon identifier."""
    # Unknown icons fall back to the generic power plug.
    mdi_name = MDI_ICONS.get(sense_icon, "power-plug")
    return f"mdi:{mdi_name}"
Decorate api calls.
def async_handle_api_call(
    function: Callable[Concatenate[_T, _P], Coroutine[Any, Any, Any]],
) -> Callable[Concatenate[_T, _P], Coroutine[Any, Any, Any]]:
    """Decorate api calls.

    Wraps an entity API coroutine: enforces a timeout, converts Sensibo
    errors and non-True results into HomeAssistantError with translation
    keys, and optimistically applies a key/value update to the entity's
    device data before requesting a coordinator refresh.
    """

    async def wrap_api_call(entity: _T, *args: _P.args, **kwargs: _P.kwargs) -> None:
        """Wrap services for api calls."""
        res: bool = False
        if TYPE_CHECKING:
            assert isinstance(entity.name, str)
        try:
            async with asyncio.timeout(TIMEOUT):
                res = await function(entity, *args, **kwargs)
        except SENSIBO_ERRORS as err:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="service_raised",
                translation_placeholders={"error": str(err), "name": entity.name},
            ) from err
        LOGGER.debug("Result %s for entity %s with arguments %s", res, entity, kwargs)
        # The API reports success by returning True; anything else is an error.
        if res is not True:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="service_result_not_true",
                translation_placeholders={"name": entity.name},
            )
        # Optimistic local update: mirror the accepted key/value onto the
        # cached device data so the UI reflects it before the next poll.
        if (
            isinstance(key := kwargs.get("key"), str)
            and (value := kwargs.get("value")) is not None
        ):
            setattr(entity.device_data, key, value)
            LOGGER.debug("Debug check key %s is now %s", key, value)
            entity.async_write_ha_state()
            await entity.coordinator.async_request_refresh()

    return wrap_api_call
Convert a device key to an entity key.
def _device_key_to_bluetooth_entity_key(
    device_key: DeviceKey,
) -> PassiveBluetoothEntityKey:
    """Convert a device key to an entity key."""
    # Straight field-for-field translation between the two key types.
    return PassiveBluetoothEntityKey(device_key.key, device_key.device_id)
Convert a sensor update to a bluetooth data update.
def sensor_update_to_bluetooth_data_update(
    sensor_update: SensorUpdate,
) -> PassiveBluetoothDataUpdate:
    """Convert a sensor update to a bluetooth data update."""
    devices = {
        device_id: sensor_device_info_to_hass_device_info(device_info)
        for device_id, device_info in sensor_update.devices.items()
    }

    # Only descriptions we have a matching SENSOR_DESCRIPTIONS entry for
    # are carried over.
    entity_descriptions = {}
    for device_key, description in sensor_update.entity_descriptions.items():
        sensor_key = _to_sensor_key(description)
        if sensor_key not in SENSOR_DESCRIPTIONS:
            continue
        entity_key = _device_key_to_bluetooth_entity_key(device_key)
        entity_descriptions[entity_key] = SENSOR_DESCRIPTIONS[sensor_key]

    entity_data = {}
    entity_names = {}
    for device_key, sensor_values in sensor_update.entity_values.items():
        entity_key = _device_key_to_bluetooth_entity_key(device_key)
        entity_data[entity_key] = sensor_values.native_value
        entity_names[entity_key] = sensor_values.name

    return PassiveBluetoothDataUpdate(
        devices=devices,
        entity_descriptions=entity_descriptions,
        entity_data=entity_data,
        entity_names=entity_names,
    )
Evaluate state based on configuration.
def async_condition_from_config(
    hass: HomeAssistant, config: ConfigType
) -> condition.ConditionCheckerType:
    """Evaluate state based on configuration."""
    numeric_state_config = {
        CONF_CONDITION: "numeric_state",
        CONF_ENTITY_ID: config[CONF_ENTITY_ID],
    }
    # Carry over only the threshold keys the user actually configured.
    for conf_key in (CONF_ABOVE, CONF_BELOW):
        if conf_key in config:
            numeric_state_config[conf_key] = config[conf_key]

    numeric_state_config = cv.NUMERIC_STATE_CONDITION_SCHEMA(numeric_state_config)
    numeric_state_config = condition.numeric_state_validate_config(
        hass, numeric_state_config
    )
    return condition.async_numeric_state_from_config(numeric_state_config)
Describe group on off states.
def async_describe_on_off_states(
    hass: HomeAssistant, registry: "GroupIntegrationRegistry"
) -> None:
    """Describe group on off states."""
    # Sensors have no on/off semantics, so the whole domain is excluded
    # from group state aggregation.
    registry.exclude_domain(DOMAIN)
Parse datetime string to a data or datetime.
def async_parse_date_datetime(
    value: str, entity_id: str, device_class: SensorDeviceClass | str | None
) -> datetime | date | None:
    """Parse a rendered string into a date or datetime for the given device class.

    Returns None (and logs a warning) for unparsable values, or for
    timestamps rendered without timezone information.
    """
    if device_class == SensorDeviceClass.TIMESTAMP:
        parsed_timestamp = dt_util.parse_datetime(value)
        if parsed_timestamp is None:
            _LOGGER.warning("%s rendered invalid timestamp: %s", entity_id, value)
            return None
        if parsed_timestamp.tzinfo is None:
            # Naive timestamps are rejected to avoid ambiguous comparisons.
            _LOGGER.warning(
                "%s rendered timestamp without timezone: %s", entity_id, value
            )
            return None
        return parsed_timestamp

    # Date device class
    parsed_date = dt_util.parse_date(value)
    if parsed_date is None:
        _LOGGER.warning("%s rendered invalid date %s", entity_id, value)
        return None
    return parsed_date
Get the current state of all sensors for which to compile statistics.
def _get_sensor_states(hass: HomeAssistant) -> list[State]:
    """Get the current state of all sensors for which to compile statistics."""
    instance = get_instance(hass)
    states: list[State] = []
    # Checking the state class first is far cheaper than running the recorder
    # entity filter, so it acts as the leading guard.
    for state in hass.states.all(DOMAIN):
        state_class = state.attributes.get(ATTR_STATE_CLASS)
        if not state_class:
            continue
        if type(state_class) is not SensorStateClass and not try_parse_enum(
            SensorStateClass, state_class
        ):
            continue
        if instance.entity_filter(state.entity_id):
            states.append(state)
    return states
Calculate a time weighted average. The average is calculated by weighting the states by duration in seconds between state changes. Note: there's no interpolation of values between state changes.
def _time_weighted_average(
    fstates: list[tuple[float, State]], start: datetime.datetime, end: datetime.datetime
) -> float:
    """Calculate a time weighted average.

    The average is calculated by weighting the states by duration in seconds between
    state changes.
    Note: there's no interpolation of values between state changes.
    """
    old_fstate: float | None = None
    old_start_time: datetime.datetime | None = None
    accumulated = 0.0

    for fstate, state in fstates:
        # The recorder will give us the last known state, which may be well
        # before the requested start time for the statistics
        start_time = start if state.last_updated < start else state.last_updated
        if old_start_time is None:
            # Adjust start time, if there was no last known state
            start = start_time
        else:
            duration = start_time - old_start_time
            # Accumulate the value, weighted by duration until next state change
            assert old_fstate is not None
            accumulated += old_fstate * duration.total_seconds()

        old_fstate = fstate
        old_start_time = start_time

    if old_fstate is not None:
        # Accumulate the value, weighted by duration until end of the period
        assert old_start_time is not None
        duration = end - old_start_time
        accumulated += old_fstate * duration.total_seconds()

    period_seconds = (end - start).total_seconds()
    if period_seconds == 0:
        # If the only state changed that happened was at the exact moment
        # at the end of the period, we can't calculate a meaningful average
        # so we return 0.0 since it represents a time duration smaller than
        # we can measure. This probably means the precision of statistics
        # column schema in the database is incorrect but it is actually possible
        # to happen if the state change event fired at the exact microsecond
        return 0.0
    return accumulated / period_seconds
Return a set of all units.
def _get_units(fstates: list[tuple[float, State]]) -> set[str | None]:
    """Collect the distinct unit_of_measurement attributes seen in fstates."""
    units: set[str | None] = set()
    for _, state in fstates:
        units.add(state.attributes.get(ATTR_UNIT_OF_MEASUREMENT))
    return units
Return True if the units are equivalent.
def _equivalent_units(units: set[str | None]) -> bool:
    """Return True if all units in the set are equivalent to one another."""
    if len(units) == 1:
        return True
    # Map every unit onto its canonical form before comparing.
    normalized = {
        EQUIVALENT_UNITS[unit] if unit in EQUIVALENT_UNITS else unit  # noqa: SIM401
        for unit in units
    }
    return len(normalized) == 1
Return a list of (float, state) tuples for the given entity.
def _entity_history_to_float_and_state(
    entity_history: Iterable[State],
) -> list[tuple[float, State]]:
    """Return a list of (float, state) tuples for the given entity.

    States whose value cannot be parsed as a float, or that parse to a
    non-finite value (inf/nan), are silently skipped.
    """
    float_states: list[tuple[float, State]] = []
    # Bind hot-loop lookups to locals once.
    append = float_states.append
    isfinite = math.isfinite
    for state in entity_history:
        try:
            float_state = float(state.state)
        except (ValueError, TypeError):
            continue
        # Fix: float() can never return None, so the previous
        # `is not None` guard was dead code — only finiteness matters.
        if isfinite(float_state):
            append((float_state, state))
    return float_states
Normalize units.
def _normalize_states(
    hass: HomeAssistant,
    old_metadatas: dict[str, tuple[int, StatisticMetaData]],
    fstates: list[tuple[float, State]],
    entity_id: str,
) -> tuple[str | None, list[tuple[float, State]]]:
    """Normalize units.

    Returns (statistics_unit, usable_fstates).  For convertible units every
    state is converted to the unit already used by the entity's compiled
    statistics; for non-convertible units the states pass through unchanged,
    but only if every state agrees on the unit.  Returns (None, []) when
    statistics generation must be suppressed.
    """
    state_unit: str | None = None
    statistics_unit: str | None
    state_unit = fstates[0][1].attributes.get(ATTR_UNIT_OF_MEASUREMENT)
    old_metadata = old_metadatas[entity_id][1] if entity_id in old_metadatas else None
    if not old_metadata:
        # We've not seen this sensor before, the first valid state determines the unit
        # used for statistics
        statistics_unit = state_unit
    else:
        # We have seen this sensor before, use the unit from metadata
        statistics_unit = old_metadata["unit_of_measurement"]

    if statistics_unit not in statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER:
        # The unit used by this sensor doesn't support unit conversion

        all_units = _get_units(fstates)
        if not _equivalent_units(all_units):
            # Unstable unit with no converter: warn once per entity and
            # suppress statistics for this period.
            if WARN_UNSTABLE_UNIT not in hass.data:
                hass.data[WARN_UNSTABLE_UNIT] = set()
            if entity_id not in hass.data[WARN_UNSTABLE_UNIT]:
                hass.data[WARN_UNSTABLE_UNIT].add(entity_id)
                extra = ""
                if old_metadata:
                    extra = (
                        " and matches the unit of already compiled statistics "
                        f"({old_metadata['unit_of_measurement']})"
                    )
                _LOGGER.warning(
                    (
                        "The unit of %s is changing, got multiple %s, generation of"
                        " long term statistics will be suppressed unless the unit is"
                        " stable%s. Go to %s to fix this"
                    ),
                    entity_id,
                    all_units,
                    extra,
                    LINK_DEV_STATISTICS,
                )
            return None, []

        state_unit = fstates[0][1].attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        return state_unit, fstates

    converter = statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER[statistics_unit]
    valid_fstates: list[tuple[float, State]] = []
    convert: Callable[[float], float] | None = None
    # Sentinel object: distinct from any real unit, including None.
    last_unit: str | None | object = object()
    valid_units = converter.VALID_UNITS

    for fstate, state in fstates:
        state_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        # Exclude states with unsupported unit from statistics
        if state_unit not in valid_units:
            if WARN_UNSUPPORTED_UNIT not in hass.data:
                hass.data[WARN_UNSUPPORTED_UNIT] = set()
            if entity_id not in hass.data[WARN_UNSUPPORTED_UNIT]:
                hass.data[WARN_UNSUPPORTED_UNIT].add(entity_id)
                _LOGGER.warning(
                    (
                        "The unit of %s (%s) cannot be converted to the unit of"
                        " previously compiled statistics (%s). Generation of long term"
                        " statistics will be suppressed unless the unit changes back to"
                        " %s or a compatible unit. Go to %s to fix this"
                    ),
                    entity_id,
                    state_unit,
                    statistics_unit,
                    statistics_unit,
                    LINK_DEV_STATISTICS,
                )
            continue

        if state_unit != last_unit:
            # The unit of measurement has changed since the last state change
            # recreate the converter factory
            if state_unit == statistics_unit:
                convert = None
            else:
                convert = converter.converter_factory(state_unit, statistics_unit)
            last_unit = state_unit

        if convert is not None:
            fstate = convert(fstate)

        valid_fstates.append((fstate, state))

    return statistics_unit, valid_fstates
Suggest to report an issue.
def _suggest_report_issue(hass: HomeAssistant, entity_id: str) -> str:
    """Build a 'please report an issue' suggestion for the entity's integration."""
    entity_info = entity_sources(hass).get(entity_id)
    integration_domain = entity_info["domain"] if entity_info else None
    return async_suggest_report_issue(hass, integration_domain=integration_domain)
Log a warning once if a sensor with state_class_total has a decreasing value. The log will be suppressed until two dips have been seen to prevent warning due to rounding issues with databases storing the state as a single precision float, which was fixed in recorder DB version 20.
def warn_dip(
    hass: HomeAssistant, entity_id: str, state: State, previous_fstate: float
) -> None:
    """Log a warning once if a sensor with state_class_total has a decreasing value.

    The log will be suppressed until two dips have been seen to prevent warning
    due to rounding issues with databases storing the state as a single
    precision float, which was fixed in recorder DB version 20.
    """
    seen = hass.data.setdefault(SEEN_DIP, set())
    if entity_id not in seen:
        # First dip observed: remember it, but stay quiet (may be rounding).
        seen.add(entity_id)
        return

    warned = hass.data.setdefault(WARN_DIP, set())
    if entity_id in warned:
        # Already warned once for this entity.
        return
    warned.add(entity_id)

    entity_info = entity_sources(hass).get(entity_id)
    domain = entity_info["domain"] if entity_info else None
    if domain in ["energy", "growatt_server", "solaredge"]:
        return
    _LOGGER.warning(
        (
            "Entity %s %shas state class total_increasing, but its state is not"
            " strictly increasing. Triggered by state %s (%s) with last_updated set"
            " to %s. Please %s"
        ),
        entity_id,
        f"from integration {domain} " if domain else "",
        state.state,
        previous_fstate,
        state.last_updated.isoformat(),
        _suggest_report_issue(hass, entity_id),
    )