Log helper.
def _log_error_setup_error(
    hass: HomeAssistant,
    domain: str,
    integration: loader.Integration | None,
    msg: str,
    exc_info: Exception | None = None,
) -> None:
    """Log helper."""
    if integration is None:
        custom = ""
        link = None
    else:
        custom = "" if integration.is_built_in else "custom integration "
        link = integration.documentation
    _LOGGER.error("Setup failed for %s'%s': %s", custom, domain, msg, exc_info=exc_info)
    async_notify_setup_error(hass, domain, link)
Call a method when a component is set up.
def async_when_setup(
    hass: core.HomeAssistant,
    component: str,
    when_setup_cb: Callable[[core.HomeAssistant, str], Awaitable[None]],
) -> None:
    """Call a method when a component is set up."""
    _async_when_setup(hass, component, when_setup_cb, False)
Call a method when a component is set up or the start event fires.
def async_when_setup_or_start(
    hass: core.HomeAssistant,
    component: str,
    when_setup_cb: Callable[[core.HomeAssistant, str], Awaitable[None]],
) -> None:
    """Call a method when a component is set up or the start event fires."""
    _async_when_setup(hass, component, when_setup_cb, True)
Call a method when a component is set up or the start event fires.
def _async_when_setup(
    hass: core.HomeAssistant,
    component: str,
    when_setup_cb: Callable[[core.HomeAssistant, str], Awaitable[None]],
    start_event: bool,
) -> None:
    """Call a method when a component is set up or the start event fires."""

    async def when_setup() -> None:
        """Call the callback."""
        try:
            await when_setup_cb(hass, component)
        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception("Error handling when_setup callback for %s", component)

    if component in hass.config.components:
        hass.async_create_task_internal(
            when_setup(), f"when setup {component}", eager_start=True
        )
        return

    listeners: list[CALLBACK_TYPE] = []

    async def _matched_event(event: Event[Any]) -> None:
        """Call the callback when we matched an event."""
        for listener in listeners:
            listener()
        await when_setup()

    @callback
    def _async_is_component_filter(event_data: EventComponentLoaded) -> bool:
        """Check if the event is for the component."""
        return event_data[ATTR_COMPONENT] == component

    listeners.append(
        hass.bus.async_listen(
            EVENT_COMPONENT_LOADED,
            _matched_event,
            event_filter=_async_is_component_filter,
        )
    )
    if start_event:
        listeners.append(
            hass.bus.async_listen(EVENT_HOMEASSISTANT_START, _matched_event)
        )
Return the complete list of loaded integrations.
def async_get_loaded_integrations(hass: core.HomeAssistant) -> set[str]:
    """Return the complete list of loaded integrations."""
    integrations = set()
    for component in hass.config.components:
        if "." not in component:
            integrations.add(component)
            continue
        platform, _, domain = component.partition(".")
        if domain in BASE_PLATFORMS:
            integrations.add(platform)
    return integrations
Return the setup started dict.
def _setup_started(
    hass: core.HomeAssistant,
) -> dict[tuple[str, str | None], float]:
    """Return the setup started dict."""
    return {}
Keep track of time we are blocked waiting for other operations. We want to count the time we wait for importing and setting up the base components so we can subtract it from the total setup time.
def async_pause_setup(
    hass: core.HomeAssistant, phase: SetupPhases
) -> Generator[None, None, None]:
    """Keep track of time we are blocked waiting for other operations.

    We want to count the time we wait for importing and setting up
    the base components so we can subtract it from the total setup time.
    """
    if not (running := current_setup_group.get()) or running not in _setup_started(
        hass
    ):
        # This means we are likely in a late platform setup
        # that is running in a task so we do not want
        # to subtract out the time later as nothing is waiting
        # for the code inside the context manager to finish.
        yield
        return

    started = time.monotonic()
    try:
        yield
    finally:
        time_taken = time.monotonic() - started
        integration, group = running
        # Add negative time for the time we waited
        _setup_times(hass)[integration][group][phase] = -time_taken
        _LOGGER.debug(
            "Adding wait for %s for %s (%s) of %.2f",
            phase,
            integration,
            group,
            time_taken,
        )
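For orientation, a hedged usage sketch of how the pause helper nests inside a timed setup block. The integration name and the import call are illustrative, and it is assumed both generator functions are wrapped as context managers (e.g. via contextlib.contextmanager), as their Generator return types suggest:

async def _setup_my_integration(hass: core.HomeAssistant) -> None:
    # Hypothetical usage: exclude import wait time from the timed setup block.
    with async_start_setup(hass, integration="my_integration", phase=SetupPhases.SETUP):
        with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PACKAGES):
            # Time waited here is recorded as a negative phase entry and
            # later subtracted from the reported setup time.
            await loader.async_get_integration(hass, "my_integration")
        ...  # actual setup work here is timed normally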
Return the setup timings default dict.
def _setup_times(
    hass: core.HomeAssistant,
) -> defaultdict[str, defaultdict[str | None, defaultdict[SetupPhases, float]]]:
    """Return the setup timings default dict."""
    return defaultdict(lambda: defaultdict(lambda: defaultdict(float)))
Keep track of when setup starts and finishes. :param hass: Home Assistant instance :param integration: The integration that is being set up :param phase: The phase of setup :param group: The group (config entry/platform instance) that is being set up. A group is a group of setups that run in parallel.
def async_start_setup(
    hass: core.HomeAssistant,
    integration: str,
    phase: SetupPhases,
    group: str | None = None,
) -> Generator[None, None, None]:
    """Keep track of when setup starts and finishes.

    :param hass: Home Assistant instance
    :param integration: The integration that is being set up
    :param phase: The phase of setup
    :param group: The group (config entry/platform instance) that is being set up

    A group is a group of setups that run in parallel.
    """
    if hass.is_stopping or hass.state is core.CoreState.running:
        # Don't track setup times when we are shutting down or already running
        # as we present the timings as "Integration startup time", and we
        # don't want to add all the setup retry times to that.
        yield
        return

    setup_started = _setup_started(hass)
    current = (integration, group)
    if current in setup_started:
        # We are already inside another async_start_setup, this likely means we
        # are setting up a platform inside async_setup_entry so we should not
        # record this as a new setup
        yield
        return

    started = time.monotonic()
    current_setup_group.set(current)
    setup_started[current] = started

    try:
        yield
    finally:
        time_taken = time.monotonic() - started
        del setup_started[current]
        group_setup_times = _setup_times(hass)[integration][group]
        # We may see the phase multiple times if there are multiple
        # platforms, but we only care about the longest time.
        group_setup_times[phase] = max(group_setup_times[phase], time_taken)
        if group is None:
            _LOGGER.info(
                "Setup of domain %s took %.2f seconds", integration, time_taken
            )
        elif _LOGGER.isEnabledFor(logging.DEBUG):
            wait_time = -sum(value for value in group_setup_times.values() if value < 0)
            calculated_time = time_taken - wait_time
            _LOGGER.debug(
                "Phase %s for %s (%s) took %.2fs (elapsed=%.2fs) (wait_time=%.2fs)",
                phase,
                integration,
                group,
                calculated_time,
                time_taken,
                wait_time,
            )
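To make the group bookkeeping concrete, here is a hypothetical snapshot of the timing structure after two config entries (groups) set up in parallel alongside some top-level work. All names and values are illustrative, not from the source:

# _setup_times(hass)["my_integration"] might then hold:
# {
#     None: {SetupPhases.SETUP: 0.5},                       # top-level, group=None
#     "entry_1": {SetupPhases.CONFIG_ENTRY_SETUP: 2.0},
#     "entry_2": {SetupPhases.CONFIG_ENTRY_SETUP: 3.0,
#                 SetupPhases.WAIT_IMPORT_PACKAGES: -0.4},  # paused wait time
# }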
Return timing data for each integration.
def async_get_setup_timings(hass: core.HomeAssistant) -> dict[str, float]:
    """Return timing data for each integration."""
    setup_time = _setup_times(hass)
    domain_timings: dict[str, float] = {}
    top_level_timings: Mapping[SetupPhases, float]
    for domain, timings in setup_time.items():
        top_level_timings = timings.get(None, {})
        total_top_level = sum(top_level_timings.values())
        # Groups (config entries/platform instance) are set up in parallel so we
        # take the max of the group timings and add it to the top level
        group_totals = {
            group: sum(group_timings.values())
            for group, group_timings in timings.items()
            if group is not None
        }
        group_max = max(group_totals.values(), default=0)
        domain_timings[domain] = total_top_level + group_max
    return domain_timings
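Continuing the illustrative snapshot above, the reported total for the domain works out as:

# entry_1 total: 2.0; entry_2 total: 3.0 + (-0.4) = 2.6
# groups run in parallel, so only the slowest counts: max(2.0, 2.6) = 2.6
# domain total = top level + slowest group = 0.5 + 2.6 = 3.1 seconds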
Validate that Home Assistant is running in a supported operating system.
def validate_os() -> None:
    """Validate that Home Assistant is running in a supported operating system."""
    if not sys.platform.startswith(("darwin", "linux")):
        print(
            "Home Assistant only supports Linux, OSX and Windows using WSL",
            file=sys.stderr,
        )
        sys.exit(1)
Validate that the right Python version is running.
def validate_python() -> None:
    """Validate that the right Python version is running."""
    if sys.version_info[:3] < REQUIRED_PYTHON_VER:
        print(
            "Home Assistant requires at least Python "
            f"{REQUIRED_PYTHON_VER[0]}.{REQUIRED_PYTHON_VER[1]}.{REQUIRED_PYTHON_VER[2]}",
            file=sys.stderr,
        )
        sys.exit(1)
Validate the configuration directory.
def ensure_config_path(config_dir: str) -> None:
    """Validate the configuration directory."""
    # pylint: disable-next=import-outside-toplevel
    from . import config as config_util

    lib_dir = os.path.join(config_dir, "deps")

    # Test if configuration directory exists
    if not os.path.isdir(config_dir):
        if config_dir != config_util.get_default_config_dir():
            if os.path.exists(config_dir):
                reason = "is not a directory"
            else:
                reason = "does not exist"
            print(
                f"Fatal Error: Specified configuration directory {config_dir} {reason}",
                file=sys.stderr,
            )
            sys.exit(1)
        try:
            os.mkdir(config_dir)
        except OSError as ex:
            print(
                "Fatal Error: Unable to create default configuration "
                f"directory {config_dir}: {ex}",
                file=sys.stderr,
            )
            sys.exit(1)

    # Test if library directory exists
    if not os.path.isdir(lib_dir):
        try:
            os.mkdir(lib_dir)
        except OSError as ex:
            print(
                f"Fatal Error: Unable to create library directory {lib_dir}: {ex}",
                file=sys.stderr,
            )
            sys.exit(1)
Get parsed passed in arguments.
def get_arguments() -> argparse.Namespace:
    """Get parsed passed in arguments."""
    # pylint: disable-next=import-outside-toplevel
    from . import config as config_util

    parser = argparse.ArgumentParser(
        description="Home Assistant: Observe, Control, Automate.",
        epilog=f"If restart is requested, exits with code {RESTART_EXIT_CODE}",
    )
    parser.add_argument("--version", action="version", version=__version__)
    parser.add_argument(
        "-c",
        "--config",
        metavar="path_to_config_dir",
        default=config_util.get_default_config_dir(),
        help="Directory that contains the Home Assistant configuration",
    )
    parser.add_argument(
        "--recovery-mode",
        action="store_true",
        help="Start Home Assistant in recovery mode",
    )
    parser.add_argument(
        "--debug", action="store_true", help="Start Home Assistant in debug mode"
    )
    parser.add_argument(
        "--open-ui", action="store_true", help="Open the web interface in a browser"
    )
    skip_pip_group = parser.add_mutually_exclusive_group()
    skip_pip_group.add_argument(
        "--skip-pip",
        action="store_true",
        help="Skips pip install of required packages on startup",
    )
    skip_pip_group.add_argument(
        "--skip-pip-packages",
        metavar="package_names",
        type=lambda arg: arg.split(","),
        default=[],
        help="Skip pip install of specific packages on startup",
    )
    parser.add_argument(
        "-v", "--verbose", action="store_true", help="Enable verbose logging to file."
    )
    parser.add_argument(
        "--log-rotate-days",
        type=int,
        default=None,
        help="Enables daily log rotation and keeps up to the specified days",
    )
    parser.add_argument(
        "--log-file",
        type=str,
        default=None,
        help="Log file to write to. If not set, CONFIG/home-assistant.log is used",
    )
    parser.add_argument(
        "--log-no-color", action="store_true", help="Disable color logs"
    )
    parser.add_argument(
        "--script", nargs=argparse.REMAINDER, help="Run one of the embedded scripts"
    )
    parser.add_argument(
        "--ignore-os-check",
        action="store_true",
        help="Skips validation of operating system",
    )

    return parser.parse_args()
Check if there are any lingering threads.
def check_threads() -> None:
    """Check if there are any lingering threads."""
    try:
        nthreads = sum(
            thread.is_alive() and not thread.daemon for thread in threading.enumerate()
        )
        if nthreads > 1:
            sys.stderr.write(f"Found {nthreads} non-daemonic threads.\n")
    # Somehow we sometimes seem to trigger an assertion in the python threading
    # module. It seems we find threads that have no associated OS level thread
    # which are not marked as stopped at the python level.
    except AssertionError:
        sys.stderr.write("Failed to count non-daemonic threads.\n")
Start Home Assistant.
def main() -> int:
    """Start Home Assistant."""
    validate_python()

    args = get_arguments()

    if not args.ignore_os_check:
        validate_os()

    if args.script is not None:
        # pylint: disable-next=import-outside-toplevel
        from . import scripts

        return scripts.run(args.script)

    config_dir = os.path.abspath(os.path.join(os.getcwd(), args.config))
    ensure_config_path(config_dir)

    # pylint: disable-next=import-outside-toplevel
    from . import config, runner

    safe_mode = config.safe_mode_enabled(config_dir)

    runtime_conf = runner.RuntimeConfig(
        config_dir=config_dir,
        verbose=args.verbose,
        log_rotate_days=args.log_rotate_days,
        log_file=args.log_file,
        log_no_color=args.log_no_color,
        skip_pip=args.skip_pip,
        skip_pip_packages=args.skip_pip_packages,
        recovery_mode=args.recovery_mode,
        debug=args.debug,
        open_ui=args.open_ui,
        safe_mode=safe_mode,
    )

    fault_file_name = os.path.join(config_dir, FAULT_LOG_FILENAME)
    with open(fault_file_name, mode="a", encoding="utf8") as fault_file:
        faulthandler.enable(fault_file)
        exit_code = runner.run(runtime_conf)
        faulthandler.disable()

    if os.path.getsize(fault_file_name) == 0:
        os.remove(fault_file_name)

    check_threads()

    return exit_code
Create system admin group.
def _system_admin_group() -> models.Group:
    """Create system admin group."""
    return models.Group(
        name=GROUP_NAME_ADMIN,
        id=GROUP_ID_ADMIN,
        policy=system_policies.ADMIN_POLICY,
        system_generated=True,
    )
Create system user group.
def _system_user_group() -> models.Group:
    """Create system user group."""
    return models.Group(
        name=GROUP_NAME_USER,
        id=GROUP_ID_USER,
        policy=system_policies.USER_POLICY,
        system_generated=True,
    )
Create read only group.
def _system_read_only_group() -> models.Group:
    """Create read only group."""
    return models.Group(
        name=GROUP_NAME_READ_ONLY,
        id=GROUP_ID_READ_ONLY,
        policy=system_policies.READ_ONLY_POLICY,
        system_generated=True,
    )
Decode the payload from a JWS dictionary.
def _decode_payload(json_payload: str) -> dict[str, Any]:
    """Decode the payload from a JWS dictionary."""
    try:
        payload = json_loads(json_payload)
    except ValueError as err:
        raise DecodeError(f"Invalid payload string: {err}") from err
    if not isinstance(payload, dict):
        raise DecodeError("Invalid payload string: must be a json object")
    return payload
Handle a change to permissions.
def _handle_permissions_change(self: User, user_attr: Attribute, new: Any) -> Any:
    """Handle a change to permissions."""
    self.invalidate_cache()
    return validate(self, user_attr, new)
Generate a secret.
def _generate_secret() -> str:
    """Generate a secret."""
    import pyotp  # pylint: disable=import-outside-toplevel

    return str(pyotp.random_base32())
Generate a 32 digit number.
def _generate_random() -> int:
    """Generate a 32 digit number."""
    import pyotp  # pylint: disable=import-outside-toplevel

    return int(pyotp.random_base32(length=32, chars=list("1234567890")))
Generate one time password.
def _generate_otp(secret: str, count: int) -> str:
    """Generate one time password."""
    import pyotp  # pylint: disable=import-outside-toplevel

    return str(pyotp.HOTP(secret).at(count))
Verify one time password.
def _verify_otp(secret: str, otp: str, count: int) -> bool:
    """Verify one time password."""
    import pyotp  # pylint: disable=import-outside-toplevel

    return bool(pyotp.HOTP(secret).verify(otp, count))
Generate an SVG string representing a QR code for the given data.
def _generate_qr_code(data: str) -> str:
    """Generate an SVG string representing a QR code for the given data."""
    import pyqrcode  # pylint: disable=import-outside-toplevel

    qr_code = pyqrcode.create(data)

    with BytesIO() as buffer:
        qr_code.svg(file=buffer, scale=4)
        return str(
            buffer.getvalue()
            .decode("ascii")
            .replace("\n", "")
            .replace(
                (
                    '<?xml version="1.0" encoding="UTF-8"?>'
                    '<svg xmlns="http://www.w3.org/2000/svg"'
                ),
                "<svg",
            )
        )
Generate a secret, url, and QR code.
def _generate_secret_and_qr_code(username: str) -> tuple[str, str, str]:
    """Generate a secret, url, and QR code."""
    import pyotp  # pylint: disable=import-outside-toplevel

    ota_secret = pyotp.random_base32()
    url = pyotp.totp.TOTP(ota_secret).provisioning_uri(
        username, issuer_name="Home Assistant"
    )
    image = _generate_qr_code(url)
    return ota_secret, url, image
Look up entity permissions by domain.
def _lookup_domain(
    perm_lookup: PermissionLookup, domains_dict: SubCategoryDict, entity_id: str
) -> ValueType | None:
    """Look up entity permissions by domain."""
    return domains_dict.get(entity_id.partition(".")[0])
Look up entity permissions by area.
def _lookup_area(
    perm_lookup: PermissionLookup, area_dict: SubCategoryDict, entity_id: str
) -> ValueType | None:
    """Look up entity permissions by area."""
    entity_entry = perm_lookup.entity_registry.async_get(entity_id)

    if entity_entry is None or entity_entry.device_id is None:
        return None

    device_entry = perm_lookup.device_registry.async_get(entity_entry.device_id)

    if device_entry is None or device_entry.area_id is None:
        return None

    return area_dict.get(device_entry.area_id)
Look up entity permissions by device.
def _lookup_device(
    perm_lookup: PermissionLookup, devices_dict: SubCategoryDict, entity_id: str
) -> ValueType | None:
    """Look up entity permissions by device."""
    entity_entry = perm_lookup.entity_registry.async_get(entity_id)

    if entity_entry is None or entity_entry.device_id is None:
        return None

    return devices_dict.get(entity_entry.device_id)
Look up entity permission by entity id.
def _lookup_entity_id(
    perm_lookup: PermissionLookup, entities_dict: SubCategoryDict, entity_id: str
) -> ValueType | None:
    """Look up entity permission by entity id."""
    return entities_dict.get(entity_id)
Compile policy into a function that tests policy.
def compile_entities(
    policy: CategoryType, perm_lookup: PermissionLookup
) -> Callable[[str, str], bool]:
    """Compile policy into a function that tests policy."""
    subcategories: SubCatLookupType = OrderedDict()
    subcategories[ENTITY_ENTITY_IDS] = _lookup_entity_id
    subcategories[ENTITY_DEVICE_IDS] = _lookup_device
    subcategories[ENTITY_AREAS] = _lookup_area
    subcategories[ENTITY_DOMAINS] = _lookup_domain
    subcategories[SUBCAT_ALL] = lookup_all

    return compile_policy(policy, subcategories, perm_lookup)
Merge policies.
def merge_policies(policies: list[PolicyType]) -> PolicyType:
    """Merge policies."""
    new_policy: dict[str, CategoryType] = {}
    seen: set[str] = set()
    for policy in policies:
        for category in policy:
            if category in seen:
                continue
            seen.add(category)
            new_policy[category] = _merge_policies(
                [policy.get(category) for policy in policies]
            )
    return cast(PolicyType, new_policy)
Merge a policy.
def _merge_policies(sources: list[CategoryType]) -> CategoryType:
    """Merge a policy."""
    # When merging policies, the most permissive wins.
    # This means we order it like this:
    # True > Dict > None
    #
    # True: allow everything
    # Dict: specify more granular permissions
    # None: no opinion
    #
    # If there are multiple sources with a dict as policy, we recursively
    # merge each key in the source.

    policy: CategoryType = None
    seen: set[str] = set()
    for source in sources:
        if source is None:
            continue

        # A source that's True will always win. Shortcut return.
        if source is True:
            return True

        assert isinstance(source, dict)

        if policy is None:
            policy = cast(CategoryType, {})

        assert isinstance(policy, dict)

        for key in source:
            if key in seen:
                continue
            seen.add(key)

            key_sources = [src.get(key) for src in sources if isinstance(src, dict)]
            policy[key] = _merge_policies(key_sources)

    return policy
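A worked example of the most-permissive-wins ordering (True beats dict, dict beats None); the entity IDs are illustrative:

merged = _merge_policies(
    [
        None,  # no opinion
        {"light.kitchen": True},  # granular grant
        {"light.kitchen": None, "switch.fan": True},
    ]
)
# True wins over None for each key:
assert merged == {"light.kitchen": True, "switch.fan": True}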
Look up permission for all.
def lookup_all(
    perm_lookup: PermissionLookup, lookup_dict: SubCategoryDict, object_id: str
) -> ValueType:
    """Look up permission for all."""
    # In case of ALL category, lookup_dict IS the schema.
    return cast(ValueType, lookup_dict)
Compile policy into a function that tests policy. Subcategories are mapping key -> lookup function, ordered by highest priority first.
def compile_policy(
    policy: CategoryType, subcategories: SubCatLookupType, perm_lookup: PermissionLookup
) -> Callable[[str, str], bool]:
    """Compile policy into a function that tests policy.

    Subcategories are mapping key -> lookup function, ordered by highest
    priority first.
    """
    # None, False, empty dict
    if not policy:

        def apply_policy_deny_all(entity_id: str, key: str) -> bool:
            """Decline all."""
            return False

        return apply_policy_deny_all

    if policy is True:

        def apply_policy_allow_all(entity_id: str, key: str) -> bool:
            """Approve all."""
            return True

        return apply_policy_allow_all

    assert isinstance(policy, dict)

    funcs: list[Callable[[str, str], bool | None]] = []

    for key, lookup_func in subcategories.items():
        lookup_value = policy.get(key)

        # If any lookup value is `True`, it will always be positive
        if isinstance(lookup_value, bool):
            return lambda object_id, key: True

        if lookup_value is not None:
            funcs.append(_gen_dict_test_func(perm_lookup, lookup_func, lookup_value))

    if len(funcs) == 1:
        func = funcs[0]

        @wraps(func)
        def apply_policy_func(object_id: str, key: str) -> bool:
            """Apply a single policy function."""
            return func(object_id, key) is True

        return apply_policy_func

    def apply_policy_funcs(object_id: str, key: str) -> bool:
        """Apply several policy functions."""
        for func in funcs:
            if (result := func(object_id, key)) is not None:
                return result
        return False

    return apply_policy_funcs
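A brief usage sketch with compile_entities from above, given a PermissionLookup instance. It assumes ENTITY_ENTITY_IDS and SUBCAT_ALL resolve to the literal keys "entity_ids" and "all" (an assumption for illustration; check the constants):

# Hypothetical policy: full control of one entity, read access to everything.
policy = {
    "entity_ids": {"light.kitchen": True},
    "all": {"read": True},
}
check = compile_entities(policy, perm_lookup)
assert check("light.kitchen", "control")   # entity_ids matches first (highest priority)
assert check("switch.fan", "read")         # falls through to the "all" lookup
assert not check("switch.fan", "control")  # no lookup grants it, so False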
Generate a lookup function.
def _gen_dict_test_func(
    perm_lookup: PermissionLookup, lookup_func: LookupFunc, lookup_dict: SubCategoryDict
) -> Callable[[str, str], bool | None]:
    """Generate a lookup function."""

    def test_value(object_id: str, key: str) -> bool | None:
        """Test if permission is allowed based on the keys."""
        schema: ValueType = lookup_func(perm_lookup, lookup_dict, object_id)

        if schema is None or isinstance(schema, bool):
            return schema

        assert isinstance(schema, dict)

        return schema.get(key)

    return test_value
Test if a policy has an ALL access for a specific key.
def test_all(policy: CategoryType, key: str) -> bool:
    """Test if a policy has an ALL access for a specific key."""
    if not isinstance(policy, dict):
        return bool(policy)

    all_policy = policy.get(SUBCAT_ALL)

    if not isinstance(all_policy, dict):
        return bool(all_policy)

    return all_policy.get(key, False)
Disallow ID in config.
def _disallow_id(conf: dict[str, Any]) -> dict[str, Any]:
    """Disallow ID in config."""
    if CONF_ID in conf:
        raise vol.Invalid("ID is not allowed for the homeassistant auth provider.")
    return conf
Get the provider.
def async_get_provider(hass: HomeAssistant) -> HassAuthProvider:
    """Get the provider."""
    for prv in hass.auth.auth_providers:
        if prv.type == "homeassistant":
            return cast(HassAuthProvider, prv)

    raise RuntimeError("Provider not found")
Load up the module to call the is_on method. If there is no entity id given we will check all.
def is_on(hass: HomeAssistant, entity_id: str | None = None) -> bool:
    """Load up the module to call the is_on method.

    If there is no entity id given we will check all.
    """
    report(
        (
            "uses homeassistant.components.is_on."
            " This is deprecated and will stop working in Home Assistant 2024.9, it"
            " should be updated to use the function of the platform directly."
        ),
        error_if_core=True,
    )

    if entity_id:
        entity_ids = expand_entity_ids(hass, [entity_id])
    else:
        entity_ids = hass.states.entity_ids()

    for ent_id in entity_ids:
        domain = split_entity_id(ent_id)[0]

        try:
            component = getattr(hass.components, domain)
        except ImportError:
            _LOGGER.error("Failed to call %s.is_on: component not found", domain)
            continue

        if not hasattr(component, "is_on"):
            _LOGGER.warning("Integration %s has no is_on method", domain)
            continue

        if component.is_on(ent_id):
            return True

    return False
Set up Home Assistant services for the Abode integration.
def setup_hass_services(hass: HomeAssistant) -> None:
    """Set up Home Assistant services for the Abode integration."""

    def change_setting(call: ServiceCall) -> None:
        """Change an Abode system setting."""
        setting = call.data[ATTR_SETTING]
        value = call.data[ATTR_VALUE]

        try:
            hass.data[DOMAIN].abode.set_setting(setting, value)
        except AbodeException as ex:
            LOGGER.warning(ex)

    def capture_image(call: ServiceCall) -> None:
        """Capture a new image."""
        entity_ids = call.data[ATTR_ENTITY_ID]

        target_entities = [
            entity_id
            for entity_id in hass.data[DOMAIN].entity_ids
            if entity_id in entity_ids
        ]

        for entity_id in target_entities:
            signal = f"abode_camera_capture_{entity_id}"
            dispatcher_send(hass, signal)

    def trigger_automation(call: ServiceCall) -> None:
        """Trigger an Abode automation."""
        entity_ids = call.data[ATTR_ENTITY_ID]

        target_entities = [
            entity_id
            for entity_id in hass.data[DOMAIN].entity_ids
            if entity_id in entity_ids
        ]

        for entity_id in target_entities:
            signal = f"abode_trigger_automation_{entity_id}"
            dispatcher_send(hass, signal)

    hass.services.register(
        DOMAIN, SERVICE_SETTINGS, change_setting, schema=CHANGE_SETTING_SCHEMA
    )
    hass.services.register(
        DOMAIN, SERVICE_CAPTURE_IMAGE, capture_image, schema=CAPTURE_IMAGE_SCHEMA
    )
    hass.services.register(
        DOMAIN, SERVICE_TRIGGER_AUTOMATION, trigger_automation, schema=AUTOMATION_SCHEMA
    )
Set up Abode event callbacks.
def setup_abode_events(hass: HomeAssistant) -> None:
    """Set up Abode event callbacks."""

    def event_callback(event: str, event_json: dict[str, str]) -> None:
        """Handle an event callback from Abode."""
        data = {
            ATTR_DEVICE_ID: event_json.get(ATTR_DEVICE_ID, ""),
            ATTR_DEVICE_NAME: event_json.get(ATTR_DEVICE_NAME, ""),
            ATTR_DEVICE_TYPE: event_json.get(ATTR_DEVICE_TYPE, ""),
            ATTR_EVENT_CODE: event_json.get(ATTR_EVENT_CODE, ""),
            ATTR_EVENT_NAME: event_json.get(ATTR_EVENT_NAME, ""),
            ATTR_EVENT_TYPE: event_json.get(ATTR_EVENT_TYPE, ""),
            ATTR_EVENT_UTC: event_json.get(ATTR_EVENT_UTC, ""),
            ATTR_USER_NAME: event_json.get(ATTR_USER_NAME, ""),
            ATTR_APP_TYPE: event_json.get(ATTR_APP_TYPE, ""),
            ATTR_EVENT_BY: event_json.get(ATTR_EVENT_BY, ""),
            ATTR_DATE: event_json.get(ATTR_DATE, ""),
            ATTR_TIME: event_json.get(ATTR_TIME, ""),
        }

        hass.bus.fire(event, data)

    events = [
        GROUPS.ALARM,
        GROUPS.ALARM_END,
        GROUPS.PANEL_FAULT,
        GROUPS.PANEL_RESTORE,
        GROUPS.AUTOMATION,
        GROUPS.DISARM,
        GROUPS.ARM,
        GROUPS.ARM_FAULT,
        GROUPS.TEST,
        GROUPS.CAPTURE,
        GROUPS.DEVICE,
    ]

    for event in events:
        hass.data[DOMAIN].abode.events.add_event_callback(
            event, partial(event_callback, event)
        )
Get device info.
def _get_device_info(location_key: str, name: str) -> DeviceInfo:
    """Get device info."""
    return DeviceInfo(
        entry_type=DeviceEntryType.SERVICE,
        identifiers={(DOMAIN, location_key)},
        manufacturer=MANUFACTURER,
        name=name,
        # You don't need to provide specific details for the URL,
        # so passing in _ characters is fine if the location key
        # is correct
        configuration_url=(
            "http://accuweather.com/en/"
            f"_/_/{location_key}/weather-forecast/{location_key}/"
        ),
    )
Register system health callbacks.
def async_register(
    hass: HomeAssistant, register: system_health.SystemHealthRegistration
) -> None:
    """Register system health callbacks."""
    register.async_register_info(system_health_info)
Connect with serial port and return Acer Projector.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Connect with serial port and return Acer Projector."""
    serial_port = config[CONF_FILENAME]
    name = config[CONF_NAME]
    timeout = config[CONF_TIMEOUT]
    write_timeout = config[CONF_WRITE_TIMEOUT]

    add_entities([AcerSwitch(serial_port, name, timeout, write_timeout)], True)
Add any new entities.
def async_add_acmeda_entities(
    hass: HomeAssistant,
    entity_class: type,
    config_entry: ConfigEntry,
    current: set[int],
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Add any new entities."""
    hub = hass.data[DOMAIN][config_entry.entry_id]
    LOGGER.debug("Looking for new %s on: %s", entity_class.__name__, hub.host)

    api = hub.api.rollers

    new_items = []
    for unique_id, roller in api.items():
        if unique_id not in current:
            LOGGER.debug("New %s %s", entity_class.__name__, unique_id)
            new_item = entity_class(roller)
            current.add(unique_id)
            new_items.append(new_item)

    async_add_entities(new_items)
Validate the configuration and return an Actiontec scanner.
def get_scanner(
    hass: HomeAssistant, config: ConfigType
) -> ActiontecDeviceScanner | None:
    """Validate the configuration and return an Actiontec scanner."""
    scanner = ActiontecDeviceScanner(config[DOMAIN])
    return scanner if scanner.success_init else None
Set up the Binary Sensor platform for ADS.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Binary Sensor platform for ADS."""
    ads_hub = hass.data.get(DATA_ADS)

    ads_var = config[CONF_ADS_VAR]
    name = config[CONF_NAME]
    device_class = config.get(CONF_DEVICE_CLASS)

    ads_sensor = AdsBinarySensor(ads_hub, name, ads_var, device_class)
    add_entities([ads_sensor])
Set up the cover platform for ADS.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the cover platform for ADS."""
    ads_hub = hass.data[DATA_ADS]

    ads_var_is_closed = config.get(CONF_ADS_VAR)
    ads_var_position = config.get(CONF_ADS_VAR_POSITION)
    ads_var_pos_set = config.get(CONF_ADS_VAR_SET_POS)
    ads_var_open = config.get(CONF_ADS_VAR_OPEN)
    ads_var_close = config.get(CONF_ADS_VAR_CLOSE)
    ads_var_stop = config.get(CONF_ADS_VAR_STOP)
    name = config[CONF_NAME]
    device_class = config.get(CONF_DEVICE_CLASS)

    add_entities(
        [
            AdsCover(
                ads_hub,
                ads_var_is_closed,
                ads_var_position,
                ads_var_pos_set,
                ads_var_open,
                ads_var_close,
                ads_var_stop,
                name,
                device_class,
            )
        ]
    )
Set up the light platform for ADS.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the light platform for ADS."""
    ads_hub = hass.data.get(DATA_ADS)

    ads_var_enable = config[CONF_ADS_VAR]
    ads_var_brightness = config.get(CONF_ADS_VAR_BRIGHTNESS)
    name = config[CONF_NAME]

    add_entities([AdsLight(ads_hub, ads_var_enable, ads_var_brightness, name)])
Set up an ADS sensor device.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up an ADS sensor device."""
    ads_hub = hass.data.get(ads.DATA_ADS)

    ads_var = config[CONF_ADS_VAR]
    ads_type = config[CONF_ADS_TYPE]
    name = config[CONF_NAME]
    unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT)
    factor = config.get(CONF_ADS_FACTOR)

    entity = AdsSensor(ads_hub, ads_var, ads_type, name, unit_of_measurement, factor)

    add_entities([entity])
Set up switch platform for ADS.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up switch platform for ADS."""
    ads_hub = hass.data.get(DATA_ADS)

    name = config[CONF_NAME]
    ads_var = config[CONF_ADS_VAR]

    add_entities([AdsSwitch(ads_hub, name, ads_var)])
Set up the ADS component.
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the ADS component."""
    conf = config[DOMAIN]

    net_id = conf[CONF_DEVICE]
    ip_address = conf.get(CONF_IP_ADDRESS)
    port = conf[CONF_PORT]

    client = pyads.Connection(net_id, port, ip_address)

    try:
        ads = AdsHub(client)
    except pyads.ADSError:
        _LOGGER.error(
            "Could not connect to ADS host (netid=%s, ip=%s, port=%s)",
            net_id,
            ip_address,
            port,
        )
        return False

    hass.data[DATA_ADS] = ads
    hass.bus.listen(EVENT_HOMEASSISTANT_STOP, ads.shutdown)

    def handle_write_data_by_name(call: ServiceCall) -> None:
        """Write a value to the connected ADS device."""
        ads_var = call.data[CONF_ADS_VAR]
        ads_type = call.data[CONF_ADS_TYPE]
        value = call.data[CONF_ADS_VALUE]

        try:
            ads.write_by_name(ads_var, value, ADS_TYPEMAP[ads_type])
        except pyads.ADSError as err:
            _LOGGER.error(err)

    hass.services.register(
        DOMAIN,
        SERVICE_WRITE_DATA_BY_NAME,
        handle_write_data_by_name,
        schema=SCHEMA_SERVICE_WRITE_DATA_BY_NAME,
    )

    return True
Create a URL from the host and port.
def generate_url(host, port) -> str:
    """Create a URL from the host and port."""
    server_origin = host
    if "://" not in host:
        server_origin = f"http://{host}"

    if server_origin[-1] == "/":
        server_origin = server_origin[:-1]

    return f"{server_origin}:{port}/"
Return data update interval. The number of requests is reset at midnight UTC so we calculate the update interval based on number of minutes until midnight, the number of Airly instances and the number of remaining requests.
def set_update_interval(instances_count: int, requests_remaining: int) -> timedelta:
    """Return data update interval.

    The number of requests is reset at midnight UTC so we calculate the update
    interval based on number of minutes until midnight, the number of Airly
    instances and the number of remaining requests.
    """
    now = dt_util.utcnow()
    midnight = dt_util.find_next_time_expression_time(
        now, seconds=[0], minutes=[0], hours=[0]
    )
    minutes_to_midnight = (midnight - now).total_seconds() / 60
    interval = timedelta(
        minutes=min(
            max(
                ceil(minutes_to_midnight / requests_remaining * instances_count),
                MIN_UPDATE_INTERVAL,
            ),
            MAX_UPDATE_INTERVAL,
        )
    )

    _LOGGER.debug("Data will be updated every %s", interval)

    return interval
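A worked example with illustrative numbers:

# 120 minutes until midnight UTC, 40 requests remaining, 2 Airly instances:
# ceil(120 / 40 * 2) = 6 -> poll every 6 minutes (before clamping to the
# [MIN_UPDATE_INTERVAL, MAX_UPDATE_INTERVAL] bounds), so the remaining
# quota stretches to the reset even with both instances polling.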
Register system health callbacks.
def async_register(
    hass: HomeAssistant, register: system_health.SystemHealthRegistration
) -> None:
    """Register system health callbacks."""
    register.async_register_info(system_health_info)
Process extra attributes for station location (if available).
def station_extra_attrs(data: dict[str, Any]) -> dict[str, Any]:
    """Process extra attributes for station location (if available)."""
    if ATTR_API_STATION in data:
        return {
            "lat": data.get(ATTR_API_STATION_LATITUDE),
            "long": data.get(ATTR_API_STATION_LONGITUDE),
        }
    return {}
Generate name with model and identifier for device.
def get_name(device: AirthingsDevice) -> str:
    """Generate name with model and identifier for device."""
    name = device.friendly_name()
    if identifier := device.identifier:
        name += f" ({identifier})"
    return name
Migrate entities to new unique ids (with BLE Address).
def async_migrate(hass: HomeAssistant, address: str, sensor_name: str) -> None:
    """Migrate entities to new unique ids (with BLE Address)."""
    ent_reg = entity_async_get(hass)
    unique_id_trailer = f"_{sensor_name}"
    new_unique_id = f"{address}{unique_id_trailer}"
    if ent_reg.async_get_entity_id(DOMAIN, Platform.SENSOR, new_unique_id):
        # New unique id already exists
        return
    dev_reg = device_async_get(hass)
    if not (
        device := dev_reg.async_get_device(
            connections={(CONNECTION_BLUETOOTH, address)}
        )
    ):
        return
    entities = async_entries_for_device(
        ent_reg,
        device_id=device.id,
        include_disabled_entities=True,
    )
    matching_reg_entry: RegistryEntry | None = None
    for entry in entities:
        if entry.unique_id.endswith(unique_id_trailer) and (
            not matching_reg_entry or "(" not in entry.unique_id
        ):
            matching_reg_entry = entry
    if not matching_reg_entry or matching_reg_entry.unique_id == new_unique_id:
        # Already has the newest unique id format
        return
    entity_id = matching_reg_entry.entity_id
    ent_reg.async_update_entity(entity_id=entity_id, new_unique_id=new_unique_id)
    _LOGGER.debug("Migrated entity '%s' to unique id '%s'", entity_id, new_unique_id)
Get a leveled scan interval for a particular cloud API key. This will shift based on the number of active consumers, thus keeping the user under the monthly API limit.
def async_get_cloud_api_update_interval(
    hass: HomeAssistant, api_key: str, num_consumers: int
) -> timedelta:
    """Get a leveled scan interval for a particular cloud API key.

    This will shift based on the number of active consumers, thus keeping the
    user under the monthly API limit.
    """
    # Assuming 10,000 calls per month and a "largest possible month" of 31 days; note
    # that we give a buffer of 1500 API calls for any drift, restarts, etc.:
    minutes_between_api_calls = ceil(num_consumers * 31 * 24 * 60 / 8500)

    LOGGER.debug(
        "Leveling API key usage (%s): %s consumers, %s minutes between updates",
        api_key,
        num_consumers,
        minutes_between_api_calls,
    )

    return timedelta(minutes=minutes_between_api_calls)
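The arithmetic, worked for a hypothetical two consumers sharing one key:

# ceil(2 * 31 * 24 * 60 / 8500) = ceil(89280 / 8500) = ceil(10.5) = 11 minutes
# -> 44640 / 11 ≈ 4058 updates per consumer over a 31-day month,
#    ~8116 calls across both consumers, under the 8500-call budget.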
Get all DataUpdateCoordinator objects related to a particular API key.
def async_get_cloud_coordinators_by_api_key(
    hass: HomeAssistant, api_key: str
) -> list[DataUpdateCoordinator]:
    """Get all DataUpdateCoordinator objects related to a particular API key."""
    return [
        coordinator
        for entry_id, coordinator in hass.data[DOMAIN].items()
        if (entry := hass.config_entries.async_get_entry(entry_id))
        and entry.data.get(CONF_API_KEY) == api_key
    ]
Generate a unique ID from a geography dict.
def async_get_geography_id(geography_dict: Mapping[str, Any]) -> str:
    """Generate a unique ID from a geography dict."""
    if CONF_CITY in geography_dict:
        return ", ".join(
            (
                geography_dict[CONF_CITY],
                geography_dict[CONF_STATE],
                geography_dict[CONF_COUNTRY],
            )
        )
    return ", ".join(
        (str(geography_dict[CONF_LATITUDE]), str(geography_dict[CONF_LONGITUDE]))
    )
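Illustrative inputs and outputs (the literal keys here stand in for the CONF_* constants):

async_get_geography_id({"city": "Portland", "state": "Oregon", "country": "USA"})
# -> "Portland, Oregon, USA"
async_get_geography_id({"latitude": 45.52, "longitude": -122.68})
# -> "45.52, -122.68"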
Sync the update interval for geography-based data coordinators (by API key).
def async_sync_geo_coordinator_update_intervals(
    hass: HomeAssistant, api_key: str
) -> None:
    """Sync the update interval for geography-based data coordinators (by API key)."""
    coordinators = async_get_cloud_coordinators_by_api_key(hass, api_key)

    if not coordinators:
        return

    update_interval = async_get_cloud_api_update_interval(
        hass, api_key, len(coordinators)
    )

    for coordinator in coordinators:
        LOGGER.debug(
            "Updating interval for coordinator: %s, %s",
            coordinator.name,
            update_interval,
        )
        coordinator.update_interval = update_interval
Ensure that geography config entries have appropriate properties.
def _standardize_geography_config_entry(
    hass: HomeAssistant, entry: ConfigEntry
) -> None:
    """Ensure that geography config entries have appropriate properties."""
    entry_updates = {}

    if not entry.unique_id:
        # If the config entry doesn't already have a unique ID, set one:
        entry_updates["unique_id"] = entry.data[CONF_API_KEY]
    if not entry.options:
        # If the config entry doesn't already have any options set, set defaults:
        entry_updates["options"] = {CONF_SHOW_ON_MAP: True}
    if entry.data.get(CONF_INTEGRATION_TYPE) not in [
        INTEGRATION_TYPE_GEOGRAPHY_COORDS,
        INTEGRATION_TYPE_GEOGRAPHY_NAME,
    ]:
        # If the config entry data doesn't contain an integration type that we know
        # about, infer it from the data we have:
        entry_updates["data"] = {**entry.data}
        if CONF_CITY in entry.data:
            entry_updates["data"][CONF_INTEGRATION_TYPE] = (
                INTEGRATION_TYPE_GEOGRAPHY_NAME
            )
        else:
            entry_updates["data"][CONF_INTEGRATION_TYPE] = (
                INTEGRATION_TYPE_GEOGRAPHY_COORDS
            )

    if not entry_updates:
        return

    hass.config_entries.async_update_entry(entry, **entry_updates)
Return the correct AQI locale based on settings data.
def async_get_aqi_locale(settings: dict[str, Any]) -> str:
    """Return the correct AQI locale based on settings data."""
    if settings["is_aqi_usa"]:
        return "aqi_us"
    return "aqi_cn"
Convert MAC address to short address.
def short_mac(addr: str) -> str:
    """Convert MAC address to short address."""
    return addr.replace(":", "")[-4:].upper()
Return dict with IDs.
def gather_ids(api_data: dict[str, Any]) -> dict[str, Any]:
    """Return dict with IDs."""
    ids: dict[str, Any] = {}

    dev_idx = 1
    for dev_id in api_data[RAW_DEVICES_STATUS]:
        if dev_id not in ids:
            ids[dev_id] = f"device{dev_idx}"
            dev_idx += 1

    group_idx = 1
    inst_idx = 1
    for inst_id, inst_data in api_data[RAW_INSTALLATIONS].items():
        if inst_id not in ids:
            ids[inst_id] = f"installation{inst_idx}"
            inst_idx += 1
        for group in inst_data[API_GROUPS]:
            group_id = group[API_GROUP_ID]
            if group_id not in ids:
                ids[group_id] = f"group{group_idx}"
                group_idx += 1

    ws_idx = 1
    for ws_id in api_data[RAW_WEBSERVERS]:
        if ws_id not in ids:
            ids[ws_id] = f"webserver{ws_idx}"
            ws_idx += 1

    return ids
Redact sensitive keys in a dict.
def redact_keys(data: Any, ids: dict[str, Any]) -> Any:
    """Redact sensitive keys in a dict."""
    if not isinstance(data, (Mapping, list)):
        return data

    if isinstance(data, list):
        return [redact_keys(val, ids) for val in data]

    redacted = {**data}
    keys = list(redacted)
    for key in keys:
        if key in ids:
            redacted[ids[key]] = redacted.pop(key)
        elif isinstance(redacted[key], Mapping):
            redacted[key] = redact_keys(redacted[key], ids)
        elif isinstance(redacted[key], list):
            redacted[key] = [redact_keys(item, ids) for item in redacted[key]]

    return redacted
Redact sensitive values in a dict.
def redact_values(data: Any, ids: dict[str, Any]) -> Any:
    """Redact sensitive values in a dict."""
    if not isinstance(data, (Mapping, list)):
        if data in ids:
            return ids[data]
        return data

    if isinstance(data, list):
        return [redact_values(val, ids) for val in data]

    redacted = {**data}
    for key, value in redacted.items():
        if value is None:
            continue
        if isinstance(value, Mapping):
            redacted[key] = redact_values(value, ids)
        elif isinstance(value, list):
            redacted[key] = [redact_values(item, ids) for item in value]
        elif value in ids:
            redacted[key] = ids[value]

    return redacted
Redact sensitive data.
def redact_all(
    data: dict[str, Any], ids: dict[str, Any], to_redact: list[str]
) -> dict[str, Any]:
    """Redact sensitive data."""
    _data = redact_keys(data, ids)
    _data = redact_values(_data, ids)
    return async_redact_data(_data, to_redact)
Describe group on off states.
def async_describe_on_off_states(
    hass: HomeAssistant, registry: "GroupIntegrationRegistry"
) -> None:
    """Describe group on off states."""
    registry.exclude_domain(DOMAIN)
Remove stale devices from device registry.
def remove_stale_devices(
    hass: HomeAssistant, config_entry: ConfigEntry, devices: list[dict]
) -> None:
    """Remove stale devices from device registry."""
    device_registry = dr.async_get(hass)
    device_entries = dr.async_entries_for_config_entry(
        device_registry, config_entry.entry_id
    )
    all_device_ids = {f"{door['device_id']}-{door['door_number']}" for door in devices}

    for device_entry in device_entries:
        device_id: str | None = None

        for identifier in device_entry.identifiers:
            if identifier[0] == DOMAIN:
                device_id = identifier[1]
                break

        if device_id is None or device_id not in all_device_ids:
            # If device_id is None an invalid device entry was found for this config
            # entry. If the device_id is not in existing device ids it's a stale
            # device entry. Remove config entry from this device entry in either case.
            device_registry.async_update_device(
                device_entry.id, remove_config_entry_id=config_entry.entry_id
            )
Convert necessary keys to int. Since ConfigFlow inputs of type int cannot default to an empty string, we collect the values below as strings and then convert them to ints.
def _fix_input_types(zone_input: dict[str, Any]) -> dict[str, Any]:
    """Convert necessary keys to int.

    Since ConfigFlow inputs of type int cannot default to an empty string,
    we collect the values below as strings and then convert them to ints.
    """
    for key in (CONF_ZONE_LOOP, CONF_RELAY_ADDR, CONF_RELAY_CHAN):
        if key in zone_input:
            zone_input[key] = int(zone_input[key])

    return zone_input
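For example, assuming CONF_ZONE_LOOP is the literal key "zone_loop" (an assumption for illustration):

_fix_input_types({"zone_loop": "2", "zone_name": "Front Door"})
# -> {"zone_loop": 2, "zone_name": "Front Door"}; unrelated keys pass through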
Determine if entry has already been added to HA.
def _device_already_added(
    current_entries: list[ConfigEntry], user_input: dict[str, Any], protocol: str | None
) -> bool:
    """Determine if entry has already been added to HA."""
    user_host = user_input.get(CONF_HOST)
    user_port = user_input.get(CONF_PORT)
    user_path = user_input.get(CONF_DEVICE_PATH)
    user_baud = user_input.get(CONF_DEVICE_BAUD)

    for entry in current_entries:
        entry_host = entry.data.get(CONF_HOST)
        entry_port = entry.data.get(CONF_PORT)
        entry_path = entry.data.get(CONF_DEVICE_PATH)
        entry_baud = entry.data.get(CONF_DEVICE_BAUD)

        if (
            protocol == PROTOCOL_SOCKET
            and user_host == entry_host
            and user_port == entry_port
        ):
            return True

        if (
            protocol == PROTOCOL_SERIAL
            and user_baud == entry_baud
            and user_path == entry_path
        ):
            return True

    return False
Create a function to test a device condition.
def async_condition_from_config(
    hass: HomeAssistant, config: ConfigType
) -> condition.ConditionCheckerType:
    """Create a function to test a device condition."""
    if config[CONF_TYPE] == CONDITION_TRIGGERED:
        state = STATE_ALARM_TRIGGERED
    elif config[CONF_TYPE] == CONDITION_DISARMED:
        state = STATE_ALARM_DISARMED
    elif config[CONF_TYPE] == CONDITION_ARMED_HOME:
        state = STATE_ALARM_ARMED_HOME
    elif config[CONF_TYPE] == CONDITION_ARMED_AWAY:
        state = STATE_ALARM_ARMED_AWAY
    elif config[CONF_TYPE] == CONDITION_ARMED_NIGHT:
        state = STATE_ALARM_ARMED_NIGHT
    elif config[CONF_TYPE] == CONDITION_ARMED_VACATION:
        state = STATE_ALARM_ARMED_VACATION
    elif config[CONF_TYPE] == CONDITION_ARMED_CUSTOM_BYPASS:
        state = STATE_ALARM_ARMED_CUSTOM_BYPASS

    registry = er.async_get(hass)
    entity_id = er.async_resolve_entity_id(registry, config[ATTR_ENTITY_ID])

    def test_is_state(hass: HomeAssistant, variables: TemplateVarsType) -> bool:
        """Test if an entity is a certain state."""
        return condition.state(hass, entity_id, state)

    return test_is_state
Describe group on off states.
def async_describe_on_off_states(
    hass: HomeAssistant, registry: "GroupIntegrationRegistry"
) -> None:
    """Describe group on off states."""
    registry.on_off_states(
        DOMAIN,
        {
            STATE_ON,
            STATE_ALARM_ARMED_AWAY,
            STATE_ALARM_ARMED_CUSTOM_BYPASS,
            STATE_ALARM_ARMED_HOME,
            STATE_ALARM_ARMED_NIGHT,
            STATE_ALARM_ARMED_VACATION,
            STATE_ALARM_TRIGGERED,
        },
        STATE_ON,
        STATE_OFF,
    )
Test if state significantly changed.
def async_check_significant_change(
    hass: HomeAssistant,
    old_state: str,
    old_attrs: dict,
    new_state: str,
    new_attrs: dict,
    **kwargs: Any,
) -> bool | None:
    """Test if state significantly changed."""
    if old_state != new_state:
        return True

    old_attrs_s = set(
        {k: v for k, v in old_attrs.items() if k in SIGNIFICANT_ATTRIBUTES}.items()
    )
    new_attrs_s = set(
        {k: v for k, v in new_attrs.items() if k in SIGNIFICANT_ATTRIBUTES}.items()
    )
    changed_attrs: set[str] = {item[0] for item in old_attrs_s ^ new_attrs_s}

    if changed_attrs:
        return True

    # no significant attribute change detected
    return False
Translate the unit of measurement to an Alexa Global Catalog keyword.
def get_resource_by_unit_of_measurement(entity: State) -> str:
    """Translate the unit of measurement to an Alexa Global Catalog keyword."""
    unit: str = entity.attributes.get("unit_of_measurement", "preset")
    return UNIT_TO_CATALOG_TAG.get(unit, AlexaGlobalCatalog.SETTING_PRESET)
Redact lwa_params.
def async_redact_lwa_params(lwa_params: dict[str, str]) -> dict[str, str]:
    """Redact lwa_params."""
    return async_redact_data(lwa_params, TO_REDACT_LWA)
Redact auth data.
def async_redact_auth_data(mapping: Mapping[Any, Any]) -> dict[str, str]:
    """Redact auth data."""
    return async_redact_data(mapping, TO_REDACT_AUTH)
Return all entities that are supported by Alexa.
def async_get_entities(
    hass: HomeAssistant, config: AbstractConfig
) -> list[AlexaEntity]:
    """Return all entities that are supported by Alexa."""
    entities: list[AlexaEntity] = []
    for state in hass.states.async_all():
        if state.entity_id in CLOUD_NEVER_EXPOSED_ENTITIES:
            continue

        if state.domain not in ENTITY_ADAPTERS:
            continue

        try:
            alexa_entity = ENTITY_ADAPTERS[state.domain](hass, config, state)
            interfaces = list(alexa_entity.interfaces())
        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception("Unable to serialize %s for discovery", state.entity_id)
        else:
            if not interfaces:
                continue
            entities.append(alexa_entity)

    return entities
Activate Alexa component.
def async_setup(hass: HomeAssistant, flash_briefing_config: ConfigType) -> None:
    """Activate Alexa component."""
    hass.http.register_view(AlexaFlashBriefingView(hass, flash_briefing_config))
Get temperature from Temperature object in requested unit.
def temperature_from_object(
    hass: ha.HomeAssistant, temp_obj: dict[str, Any], interval: bool = False
) -> float:
    """Get temperature from Temperature object in requested unit."""
    to_unit = hass.config.units.temperature_unit
    from_unit = UnitOfTemperature.CELSIUS
    temp = float(temp_obj["value"])

    if temp_obj["scale"] == "FAHRENHEIT":
        from_unit = UnitOfTemperature.FAHRENHEIT
    elif temp_obj["scale"] == "KELVIN" and not interval:
        # convert to Celsius if absolute temperature
        temp -= 273.15

    if interval:
        return TemperatureConverter.convert_interval(temp, from_unit, to_unit)
    return TemperatureConverter.convert(temp, from_unit, to_unit)
Activate Alexa component.
def async_setup(hass: HomeAssistant) -> None:
    """Activate Alexa component."""
    hass.http.register_view(AlexaIntentsView)
Return an Alexa response that will speak the error message.
def intent_error_response(
    hass: HomeAssistant, message: dict[str, Any], error: str
) -> dict[str, Any]:
    """Return an Alexa response that will speak the error message."""
    alexa_intent_info = message["request"].get("intent")
    alexa_response = AlexaIntentResponse(hass, alexa_intent_info)
    alexa_response.add_speech(SpeechType.plaintext, error)
    return alexa_response.as_dict()
Check slot request for synonym resolutions.
def resolve_slot_data(key: str, request: dict[str, Any]) -> dict[str, str]:
    """Check slot request for synonym resolutions."""
    # Default to the spoken slot value if more than one or none are found. Always
    # passes the id and name of the nearest possible slot resolution. For
    # reference to the request object structure, see the Alexa docs:
    # https://tinyurl.com/ybvm7jhs
    resolved_data: dict[str, Any] = {}
    resolved_data["value"] = request["value"]
    resolved_data["id"] = ""

    if (
        "resolutions" in request
        and "resolutionsPerAuthority" in request["resolutions"]
        and len(request["resolutions"]["resolutionsPerAuthority"]) >= 1
    ):
        # Extract all of the possible values from each authority with a
        # successful match
        possible_values = []

        for entry in request["resolutions"]["resolutionsPerAuthority"]:
            if entry["status"]["code"] != SYN_RESOLUTION_MATCH:
                continue

            possible_values.extend([item["value"] for item in entry["values"]])

        # Always set id if available, otherwise an empty string is used as id
        if len(possible_values) >= 1:
            # Set ID if available
            if "id" in possible_values[0]:
                resolved_data["id"] = possible_values[0]["id"]

        # If there is only one match use the resolved value, otherwise the
        # resolution cannot be determined, so use the spoken slot value and
        # empty string as id
        if len(possible_values) == 1:
            resolved_data["value"] = possible_values[0]["name"]
        else:
            _LOGGER.debug(
                "Found multiple synonym resolutions for slot value: {%s: %s}",
                key,
                resolved_data["value"],
            )

    return resolved_data
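A hedged sketch of a matching request, assuming SYN_RESOLUTION_MATCH is Alexa's "ER_SUCCESS_MATCH" status code (an assumption; verify against the constant):

request = {
    "value": "livingroom",  # raw spoken value
    "resolutions": {
        "resolutionsPerAuthority": [
            {
                "status": {"code": "ER_SUCCESS_MATCH"},
                "values": [{"value": {"name": "Living Room", "id": "LR1"}}],
            }
        ]
    },
}
resolve_slot_data("room", request)
# single match -> {"value": "Living Room", "id": "LR1"}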
Describe logbook events.
def async_describe_events(
    hass: HomeAssistant,
    async_describe_event: Callable[[str, str, Callable[[Event], dict[str, str]]], None],
) -> None:
    """Describe logbook events."""

    @callback
    def async_describe_logbook_event(event: Event) -> dict[str, Any]:
        """Describe a logbook event."""
        data = event.data

        if entity_id := data["request"].get("entity_id"):
            state = hass.states.get(entity_id)
            name = state.name if state else entity_id
            message = (
                "sent command"
                f" {data['request']['namespace']}/{data['request']['name']} for {name}"
            )
        else:
            message = (
                f"sent command {data['request']['namespace']}/{data['request']['name']}"
            )

        return {
            LOGBOOK_ENTRY_NAME: "Amazon Alexa",
            LOGBOOK_ENTRY_MESSAGE: message,
            LOGBOOK_ENTRY_ENTITY_ID: entity_id,
        }

    async_describe_event(DOMAIN, EVENT_ALEXA_SMART_HOME, async_describe_logbook_event)
Set up the Alpha Vantage sensor.
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Alpha Vantage sensor."""
    api_key: str = config[CONF_API_KEY]
    symbols: list[dict[str, str]] = config.get(CONF_SYMBOLS, [])
    conversions: list[dict[str, str]] = config.get(CONF_FOREIGN_EXCHANGE, [])

    if not symbols and not conversions:
        msg = "No symbols or currencies configured."
        persistent_notification.create(hass, msg, "Sensor alpha_vantage")
        _LOGGER.warning(msg)
        return

    timeseries = TimeSeries(key=api_key)

    dev: list[SensorEntity] = []
    for symbol in symbols:
        try:
            _LOGGER.debug("Configuring timeseries for symbols: %s", symbol[CONF_SYMBOL])
            timeseries.get_intraday(symbol[CONF_SYMBOL])
        except ValueError:
            _LOGGER.error("API Key is not valid or symbol '%s' not known", symbol)
        dev.append(AlphaVantageSensor(timeseries, symbol))

    forex = ForeignExchange(key=api_key)
    for conversion in conversions:
        from_cur = conversion.get(CONF_FROM)
        to_cur = conversion.get(CONF_TO)
        try:
            _LOGGER.debug("Configuring forex %s - %s", from_cur, to_cur)
            forex.get_currency_exchange_rate(from_currency=from_cur, to_currency=to_cur)
        except ValueError as error:
            _LOGGER.error(
                "API Key is not valid or currencies '%s'/'%s' not known",
                from_cur,
                to_cur,
            )
            _LOGGER.debug(str(error))
        dev.append(AlphaVantageForeignExchange(forex, conversion))

    add_entities(dev, True)
    _LOGGER.debug("Setup completed")
Set up Amazon Polly speech component.
def get_engine(
    hass: HomeAssistant,
    config: ConfigType,
    discovery_info: DiscoveryInfoType | None = None,
) -> Provider | None:
    """Set up Amazon Polly speech component."""
    output_format = config[CONF_OUTPUT_FORMAT]
    sample_rate = config.get(CONF_SAMPLE_RATE, DEFAULT_SAMPLE_RATES[output_format])
    if sample_rate not in SUPPORTED_SAMPLE_RATES_MAP[output_format]:
        _LOGGER.error(
            "%s is not a valid sample rate for %s", sample_rate, output_format
        )
        return None

    config[CONF_SAMPLE_RATE] = sample_rate

    profile: str | None = config.get(CONF_PROFILE_NAME)

    if profile is not None:
        boto3.setup_default_session(profile_name=profile)

    aws_config = {
        CONF_REGION: config[CONF_REGION],
        CONF_ACCESS_KEY_ID: config.get(CONF_ACCESS_KEY_ID),
        CONF_SECRET_ACCESS_KEY: config.get(CONF_SECRET_ACCESS_KEY),
        "config": botocore.config.Config(
            connect_timeout=AWS_CONF_CONNECT_TIMEOUT,
            read_timeout=AWS_CONF_READ_TIMEOUT,
            max_pool_connections=AWS_CONF_MAX_POOL_CONNECTIONS,
        ),
    }

    del config[CONF_REGION]
    del config[CONF_ACCESS_KEY_ID]
    del config[CONF_SECRET_ACCESS_KEY]

    polly_client = boto3.client("polly", **aws_config)

    supported_languages: list[str] = []
    all_voices: dict[str, dict[str, str]] = {}

    all_voices_req = polly_client.describe_voices()

    for voice in all_voices_req.get("Voices", []):
        voice_id: str | None = voice.get("Id")
        if voice_id is None:
            continue
        all_voices[voice_id] = voice
        language_code: str | None = voice.get("LanguageCode")
        if language_code is not None and language_code not in supported_languages:
            supported_languages.append(language_code)

    return AmazonPollyProvider(polly_client, config, supported_languages, all_voices)
Generate the name to show in the site drop down in the configuration flow.
def generate_site_selector_name(site: Site) -> str:
    """Generate the name to show in the site drop down in the configuration flow."""
    if site.status == SiteStatus.CLOSED:
        return site.nmi + " (Closed: " + site.closed_on.isoformat() + ")"  # type: ignore[no-any-return]
    if site.status == SiteStatus.PENDING:
        return site.nmi + " (Pending)"  # type: ignore[no-any-return]
    return site.nmi
Deduplicate the list of sites.
def filter_sites(sites: list[Site]) -> list[Site]:
    """Deduplicate the list of sites."""
    filtered: list[Site] = []
    filtered_nmi: set[str] = set()

    for site in sorted(sites, key=lambda site: site.status.value):
        if site.status == SiteStatus.ACTIVE or site.nmi not in filtered_nmi:
            filtered.append(site)
            filtered_nmi.add(site.nmi)

    return filtered
Return true if the supplied interval is a CurrentInterval.
def is_current(interval: ActualInterval | CurrentInterval | ForecastInterval) -> bool:
    """Return true if the supplied interval is a CurrentInterval."""
    return isinstance(interval, CurrentInterval)
Return true if the supplied interval is a ForecastInterval.
def is_forecast(interval: ActualInterval | CurrentInterval | ForecastInterval) -> bool:
    """Return true if the supplied interval is a ForecastInterval."""
    return isinstance(interval, ForecastInterval)
Return true if the supplied interval is on the general channel.
def is_general(interval: ActualInterval | CurrentInterval | ForecastInterval) -> bool:
    """Return true if the supplied interval is on the general channel."""
    return interval.channel_type == ChannelType.GENERAL
Return true if the supplied interval is on the controlled load channel.
def is_controlled_load(
    interval: ActualInterval | CurrentInterval | ForecastInterval,
) -> bool:
    """Return true if the supplied interval is on the controlled load channel."""
    return interval.channel_type == ChannelType.CONTROLLED_LOAD
Return true if the supplied interval is on the feed in channel.
def is_feed_in(interval: ActualInterval | CurrentInterval | ForecastInterval) -> bool:
    """Return true if the supplied interval is on the feed in channel."""
    return interval.channel_type == ChannelType.FEED_IN
Return the snake case versions of descriptor names. Returns None if the name is not recognized.
def normalize_descriptor(descriptor: Descriptor) -> str | None:
    """Return the snake case versions of descriptor names.

    Returns None if the name is not recognized.
    """
    if descriptor is None:
        return None
    if descriptor.value == "spike":
        return "spike"
    if descriptor.value == "high":
        return "high"
    if descriptor.value == "neutral":
        return "neutral"
    if descriptor.value == "low":
        return "low"
    if descriptor.value == "veryLow":
        return "very_low"
    if descriptor.value == "extremelyLow":
        return "extremely_low"
    if descriptor.value == "negative":
        return "negative"
    return None
Return a formatted conversion from cents to dollars.
def format_cents_to_dollars(cents: float) -> float:
    """Return a formatted conversion from cents to dollars."""
    return round(cents / 100, 2)
Return a human readable version of the channel type.
def friendly_channel_type(channel_type: str) -> str:
    """Return a human readable version of the channel type."""
    if channel_type == "controlled_load":
        return "Controlled Load"
    if channel_type == "feed_in":
        return "Feed In"
    return "General"