Set the result of a future unless it is done.
def _set_result_unless_done(future: asyncio.Future[None]) -> None:
    """Set the result of a future unless it is done."""
    if not future.done():
        future.set_result(None)
Validate a statistic.
def _async_validate_usage_stat( hass: HomeAssistant, metadata: dict[str, tuple[int, recorder.models.StatisticMetaData]], stat_id: str, allowed_device_classes: Sequence[str], allowed_units: Mapping[str, Sequence[str]], unit_error: str, issues: ValidationIssues, ) -> None: """Validate a statistic.""" if stat_id not in metadata: issues.add_issue(hass, "statistics_not_defined", stat_id) has_entity_source = valid_entity_id(stat_id) if not has_entity_source: return entity_id = stat_id if not recorder.is_entity_recorded(hass, entity_id): issues.add_issue(hass, "recorder_untracked", entity_id) return if (state := hass.states.get(entity_id)) is None: issues.add_issue(hass, "entity_not_defined", entity_id) return if state.state in (STATE_UNAVAILABLE, STATE_UNKNOWN): issues.add_issue(hass, "entity_unavailable", entity_id, state.state) return try: current_value: float | None = float(state.state) except ValueError: issues.add_issue(hass, "entity_state_non_numeric", entity_id, state.state) return if current_value is not None and current_value < 0: issues.add_issue(hass, "entity_negative_state", entity_id, current_value) device_class = state.attributes.get(ATTR_DEVICE_CLASS) if device_class not in allowed_device_classes: issues.add_issue( hass, "entity_unexpected_device_class", entity_id, device_class ) else: unit = state.attributes.get("unit_of_measurement") if device_class and unit not in allowed_units.get(device_class, []): issues.add_issue(hass, unit_error, entity_id, unit) state_class = state.attributes.get(sensor.ATTR_STATE_CLASS) allowed_state_classes = [ sensor.SensorStateClass.MEASUREMENT, sensor.SensorStateClass.TOTAL, sensor.SensorStateClass.TOTAL_INCREASING, ] if state_class not in allowed_state_classes: issues.add_issue(hass, "entity_unexpected_state_class", entity_id, state_class) if ( state_class == sensor.SensorStateClass.MEASUREMENT and sensor.ATTR_LAST_RESET not in state.attributes ): issues.add_issue( hass, "entity_state_class_measurement_no_last_reset", entity_id )
Validate that the price entity is correct.
def _async_validate_price_entity(
    hass: HomeAssistant,
    entity_id: str,
    issues: ValidationIssues,
    allowed_units: tuple[str, ...],
    unit_error: str,
) -> None:
    """Validate that the price entity is correct."""
    if (state := hass.states.get(entity_id)) is None:
        issues.add_issue(hass, "entity_not_defined", entity_id)
        return

    try:
        float(state.state)
    except ValueError:
        issues.add_issue(hass, "entity_state_non_numeric", entity_id, state.state)
        return

    unit = state.attributes.get("unit_of_measurement")

    if unit is None or not unit.endswith(allowed_units):
        issues.add_issue(hass, unit_error, entity_id, unit)
Validate that the cost stat is correct.
def _async_validate_cost_stat( hass: HomeAssistant, metadata: dict[str, tuple[int, recorder.models.StatisticMetaData]], stat_id: str, issues: ValidationIssues, ) -> None: """Validate that the cost stat is correct.""" if stat_id not in metadata: issues.add_issue(hass, "statistics_not_defined", stat_id) has_entity = valid_entity_id(stat_id) if not has_entity: return if not recorder.is_entity_recorded(hass, stat_id): issues.add_issue(hass, "recorder_untracked", stat_id) if (state := hass.states.get(stat_id)) is None: issues.add_issue(hass, "entity_not_defined", stat_id) return state_class = state.attributes.get("state_class") supported_state_classes = [ sensor.SensorStateClass.MEASUREMENT, sensor.SensorStateClass.TOTAL, sensor.SensorStateClass.TOTAL_INCREASING, ] if state_class not in supported_state_classes: issues.add_issue(hass, "entity_unexpected_state_class", stat_id, state_class) if ( state_class == sensor.SensorStateClass.MEASUREMENT and sensor.ATTR_LAST_RESET not in state.attributes ): issues.add_issue(hass, "entity_state_class_measurement_no_last_reset", stat_id)
Validate that the auto generated cost entity is correct.
def _async_validate_auto_generated_cost_entity(
    hass: HomeAssistant, energy_entity_id: str, issues: ValidationIssues
) -> None:
    """Validate that the auto generated cost entity is correct."""
    if energy_entity_id not in hass.data[DOMAIN]["cost_sensors"]:
        # The cost entity has not been setup
        return

    cost_entity_id = hass.data[DOMAIN]["cost_sensors"][energy_entity_id]
    if not recorder.is_entity_recorded(hass, cost_entity_id):
        issues.add_issue(hass, "recorder_untracked", cost_entity_id)
Set up the energy websocket API.
def async_setup(hass: HomeAssistant) -> None:
    """Set up the energy websocket API."""
    websocket_api.async_register_command(hass, ws_get_prefs)
    websocket_api.async_register_command(hass, ws_save_prefs)
    websocket_api.async_register_command(hass, ws_info)
    websocket_api.async_register_command(hass, ws_validate)
    websocket_api.async_register_command(hass, ws_solar_forecast)
    websocket_api.async_register_command(hass, ws_get_fossil_energy_consumption)
Decorate a function to pass in a manager.
def _ws_with_manager(
    func: Any,
) -> websocket_api.WebSocketCommandHandler:
    """Decorate a function to pass in a manager."""

    @websocket_api.async_response
    @functools.wraps(func)
    async def with_manager(
        hass: HomeAssistant,
        connection: websocket_api.ActiveConnection,
        msg: dict[str, Any],
    ) -> None:
        manager = await async_get_manager(hass)

        result = func(hass, connection, msg, manager)

        if asyncio.iscoroutine(result):
            await result

    return with_manager
Handle get prefs command.
def ws_get_prefs(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict,
    manager: EnergyManager,
) -> None:
    """Handle get prefs command."""
    if manager.data is None:
        connection.send_error(msg["id"], websocket_api.ERR_NOT_FOUND, "No prefs")
        return

    connection.send_result(msg["id"], manager.data)
Get the gas price for a given hour. Args: data: The data object. hours: The number of hours to add to the current time. Returns: The gas market price value.
def get_gas_price(data: EnergyZeroData, hours: int) -> float | None:
    """Get the gas price for a given hour.

    Args:
        data: The data object.
        hours: The number of hours to add to the current time.

    Returns:
        The gas market price value.
    """
    if not data.gas_today:
        return None

    return data.gas_today.price_at_time(
        data.gas_today.utcnow() + timedelta(hours=hours)
    )
Return the gas value. Args: data: The data object. hours: The number of hours to add to the current time. Returns: The gas market price value.
def get_gas_price(data: EnergyZeroData, hours: int) -> float | None:
    """Return the gas value.

    Args:
        data: The data object.
        hours: The number of hours to add to the current time.

    Returns:
        The gas market price value.
    """
    if data.gas_today is None:
        return None

    return data.gas_today.price_at_time(
        data.gas_today.utcnow() + timedelta(hours=hours)
    )
Get date.
def __get_date(date_input: str | None) -> date | datetime:
    """Get date."""
    if not date_input:
        return dt_util.now().date()

    if value := dt_util.parse_datetime(date_input):
        return value

    raise ServiceValidationError(
        translation_domain=DOMAIN,
        translation_key="invalid_date",
        translation_placeholders={
            "date": date_input,
        },
    )
Serialize prices.
def __serialize_prices(prices: Electricity | Gas) -> ServiceResponse:
    """Serialize prices."""
    return {
        "prices": [
            {
                key: str(value) if isinstance(value, datetime) else value
                for key, value in timestamp_price.items()
            }
            for timestamp_price in prices.timestamp_prices
        ]
    }
Get the coordinator from the entry.
def __get_coordinator( hass: HomeAssistant, call: ServiceCall ) -> EnergyZeroDataUpdateCoordinator: """Get the coordinator from the entry.""" entry_id: str = call.data[ATTR_CONFIG_ENTRY] entry: ConfigEntry | None = hass.config_entries.async_get_entry(entry_id) if not entry: raise ServiceValidationError( translation_domain=DOMAIN, translation_key="invalid_config_entry", translation_placeholders={ "config_entry": entry_id, }, ) if entry.state != ConfigEntryState.LOADED: raise ServiceValidationError( translation_domain=DOMAIN, translation_key="unloaded_config_entry", translation_placeholders={ "config_entry": entry.title, }, ) coordinator: EnergyZeroDataUpdateCoordinator = hass.data[DOMAIN][entry_id] return coordinator
Set up EnergyZero services.
def async_setup_services(hass: HomeAssistant) -> None:
    """Set up EnergyZero services."""

    hass.services.async_register(
        DOMAIN,
        GAS_SERVICE_NAME,
        partial(__get_prices, hass=hass, price_type=PriceType.GAS),
        schema=SERVICE_SCHEMA,
        supports_response=SupportsResponse.ONLY,
    )
    hass.services.async_register(
        DOMAIN,
        ENERGY_SERVICE_NAME,
        partial(__get_prices, hass=hass, price_type=PriceType.ENERGY),
        schema=SERVICE_SCHEMA,
        supports_response=SupportsResponse.ONLY,
    )
Set up the Binary Sensor platform for EnOcean.
def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Binary Sensor platform for EnOcean.""" dev_id: list[int] = config[CONF_ID] dev_name: str = config[CONF_NAME] device_class: BinarySensorDeviceClass | None = config.get(CONF_DEVICE_CLASS) add_entities([EnOceanBinarySensor(dev_id, dev_name, device_class)])
Return a list of candidate paths for USB ENOcean dongles. This method is currently a bit simplistic, it may need to be improved to support more configurations and OS.
def detect():
    """Return a list of candidate paths for USB ENOcean dongles.

    This method is currently a bit simplistic, it may need to be improved to
    support more configurations and OS.
    """
    globs_to_test = ["/dev/tty*FTOA2PV*", "/dev/serial/by-id/*EnOcean*"]
    found_paths = []
    for current_glob in globs_to_test:
        found_paths.extend(glob.glob(current_glob))

    return found_paths
Return True if the provided path points to a valid serial port, False otherwise.
def validate_path(path: str):
    """Return True if the provided path points to a valid serial port, False otherwise."""
    try:
        # Creating the serial communicator will raise an exception
        # if it cannot connect
        SerialCommunicator(port=path)
    except serial.SerialException as exception:
        _LOGGER.warning("Dongle path %s is invalid: %s", path, str(exception))
        return False

    return True
Set up the EnOcean light platform.
def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the EnOcean light platform.""" sender_id: list[int] = config[CONF_SENDER_ID] dev_name: str = config[CONF_NAME] dev_id: list[int] = config[CONF_ID] add_entities([EnOceanLight(sender_id, dev_id, dev_name)])
Set up an EnOcean sensor device.
def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up an EnOcean sensor device.""" dev_id: list[int] = config[CONF_ID] dev_name: str = config[CONF_NAME] sensor_type: str = config[CONF_DEVICE_CLASS] entities: list[EnOceanSensor] = [] if sensor_type == SENSOR_TYPE_TEMPERATURE: temp_min: int = config[CONF_MIN_TEMP] temp_max: int = config[CONF_MAX_TEMP] range_from: int = config[CONF_RANGE_FROM] range_to: int = config[CONF_RANGE_TO] entities = [ EnOceanTemperatureSensor( dev_id, dev_name, SENSOR_DESC_TEMPERATURE, scale_min=temp_min, scale_max=temp_max, range_from=range_from, range_to=range_to, ) ] elif sensor_type == SENSOR_TYPE_HUMIDITY: entities = [EnOceanHumiditySensor(dev_id, dev_name, SENSOR_DESC_HUMIDITY)] elif sensor_type == SENSOR_TYPE_POWER: entities = [EnOceanPowerSensor(dev_id, dev_name, SENSOR_DESC_POWER)] elif sensor_type == SENSOR_TYPE_WINDOWHANDLE: entities = [EnOceanWindowHandle(dev_id, dev_name, SENSOR_DESC_WINDOWHANDLE)] add_entities(entities)
Generate a valid unique id.
def generate_unique_id(dev_id: list[int], channel: int) -> str:
    """Generate a valid unique id."""
    return f"{combine_hex(dev_id)}-{channel}"
Migrate old unique ids to new unique ids.
def _migrate_to_new_unique_id(hass: HomeAssistant, dev_id, channel) -> None: """Migrate old unique ids to new unique ids.""" old_unique_id = f"{combine_hex(dev_id)}" ent_reg = er.async_get(hass) entity_id = ent_reg.async_get_entity_id(Platform.SWITCH, DOMAIN, old_unique_id) if entity_id is not None: new_unique_id = generate_unique_id(dev_id, channel) try: ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id) except ValueError: LOGGER.warning( "Skip migration of id [%s] to [%s] because it already exists", old_unique_id, new_unique_id, ) else: LOGGER.debug( "Migrating unique_id from [%s] to [%s]", old_unique_id, new_unique_id, )
Get the time in minutes from a timestamp.
def due_in_minutes(timestamp: datetime) -> int | None:
    """Get the time in minutes from a timestamp."""
    if timestamp is None:
        return None
    diff = timestamp - dt_util.now()

    return int(diff.total_seconds() / 60)
Calculate unique ID.
def _calculate_unique_id(config_entry_unique_id: str | None, hourly: bool) -> str:
    """Calculate unique ID."""
    return f"{config_entry_unique_id}{'-hourly' if hourly else '-daily'}"
Build the forecast array.
def get_forecast(ec_data, hourly) -> list[Forecast] | None: """Build the forecast array.""" forecast_array: list[Forecast] = [] if not hourly: if not (half_days := ec_data.daily_forecasts): return None today: Forecast = { ATTR_FORECAST_TIME: dt_util.now().isoformat(), ATTR_FORECAST_CONDITION: icon_code_to_condition( int(half_days[0]["icon_code"]) ), ATTR_FORECAST_PRECIPITATION_PROBABILITY: int( half_days[0]["precip_probability"] ), } if half_days[0]["temperature_class"] == "high": today.update( { ATTR_FORECAST_NATIVE_TEMP: int(half_days[0]["temperature"]), ATTR_FORECAST_NATIVE_TEMP_LOW: int(half_days[1]["temperature"]), } ) half_days = half_days[2:] else: today.update( { ATTR_FORECAST_NATIVE_TEMP: None, ATTR_FORECAST_NATIVE_TEMP_LOW: int(half_days[0]["temperature"]), } ) half_days = half_days[1:] forecast_array.append(today) for day, high, low in zip( range(1, 6), range(0, 9, 2), range(1, 10, 2), strict=False ): forecast_array.append( { ATTR_FORECAST_TIME: ( dt_util.now() + datetime.timedelta(days=day) ).isoformat(), ATTR_FORECAST_NATIVE_TEMP: int(half_days[high]["temperature"]), ATTR_FORECAST_NATIVE_TEMP_LOW: int(half_days[low]["temperature"]), ATTR_FORECAST_CONDITION: icon_code_to_condition( int(half_days[high]["icon_code"]) ), ATTR_FORECAST_PRECIPITATION_PROBABILITY: int( half_days[high]["precip_probability"] ), } ) else: forecast_array.extend( { ATTR_FORECAST_TIME: hour["period"].isoformat(), ATTR_FORECAST_NATIVE_TEMP: int(hour["temperature"]), ATTR_FORECAST_CONDITION: icon_code_to_condition(int(hour["icon_code"])), ATTR_FORECAST_PRECIPITATION_PROBABILITY: int( hour["precip_probability"] ), } for hour in ec_data.hourly_forecasts ) return forecast_array
Return the condition corresponding to an icon code.
def icon_code_to_condition(icon_code):
    """Return the condition corresponding to an icon code."""
    for condition, codes in ICON_CONDITION_MAP.items():
        if icon_code in codes:
            return condition
    return None
Build and return the device info for EC.
def device_info(config_entry: ConfigEntry) -> DeviceInfo:
    """Build and return the device info for EC."""
    return DeviceInfo(
        entry_type=DeviceEntryType.SERVICE,
        identifiers={(DOMAIN, config_entry.entry_id)},
        manufacturer="Environment Canada",
        name=config_entry.title,
        configuration_url="https://weather.gc.ca/",
    )
Set up the ephember thermostat.
def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the ephember thermostat.""" username = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) try: ember = EphEmber(username, password) zones = ember.get_zones() for zone in zones: add_entities([EphEmberThermostat(ember, zone)]) except RuntimeError: _LOGGER.error("Cannot connect to EphEmber") return return
Return a CalendarEvent from an API event.
def _get_calendar_event(event: dict[str, Any]) -> CalendarEvent:
    """Return a CalendarEvent from an API event."""
    return CalendarEvent(
        summary=event["title"],
        start=event["discount_start_at"],
        end=event["discount_end_at"],
        description=f"{event['description']}\n\n{event['url']}",
    )
Return whether two date ranges overlap.
def _are_date_range_overlapping(range1: DateRange, range2: DateRange) -> bool:
    """Return whether two date ranges overlap."""
    latest_start = max(range1.start, range2.start)
    earliest_end = min(range1.end, range2.end)
    delta = (earliest_end - latest_start).days + 1
    overlap = max(0, delta)
    return overlap > 0
Get default language code based on Home Assistant config.
def get_default_language(hass: HomeAssistant) -> str | None:
    """Get default language code based on Home Assistant config."""
    language_code = f"{hass.config.language}-{hass.config.country}"
    if language_code in SUPPORTED_LANGUAGES:
        return language_code
    if hass.config.language in SUPPORTED_LANGUAGES:
        return hass.config.language
    return None
Format raw API game data for Home Assistant users.
def format_game_data(raw_game_data: dict[str, Any], language: str) -> dict[str, Any]: """Format raw API game data for Home Assistant users.""" img_portrait = None img_landscape = None for image in raw_game_data["keyImages"]: if image["type"] == "OfferImageTall": img_portrait = image["url"] if image["type"] == "OfferImageWide": img_landscape = image["url"] current_promotions = raw_game_data["promotions"]["promotionalOffers"] upcoming_promotions = raw_game_data["promotions"]["upcomingPromotionalOffers"] promotion_data = {} if ( current_promotions and raw_game_data["price"]["totalPrice"]["discountPrice"] == 0 ): promotion_data = current_promotions[0]["promotionalOffers"][0] else: promotion_data = (current_promotions or upcoming_promotions)[0][ "promotionalOffers" ][0] return { "title": raw_game_data["title"].replace("\xa0", " "), "description": raw_game_data["description"].strip().replace("\xa0", " "), "released_at": dt_util.parse_datetime(raw_game_data["effectiveDate"]), "original_price": raw_game_data["price"]["totalPrice"]["fmtPrice"][ "originalPrice" ].replace("\xa0", " "), "publisher": raw_game_data["seller"]["name"], "url": get_game_url(raw_game_data, language), "img_portrait": img_portrait, "img_landscape": img_landscape, "discount_type": ("free" if is_free_game(raw_game_data) else "discount") if promotion_data else None, "discount_start_at": dt_util.parse_datetime(promotion_data["startDate"]) if promotion_data else None, "discount_end_at": dt_util.parse_datetime(promotion_data["endDate"]) if promotion_data else None, }
Return the store page URL for a game.
def get_game_url(raw_game_data: dict[str, Any], language: str) -> str:
    """Return the store page URL for a game."""
    url_bundle_or_product = "bundles" if raw_game_data["offerType"] == "BUNDLE" else "p"
    url_slug: str | None = None
    try:
        url_slug = raw_game_data["offerMappings"][0]["pageSlug"]
    except Exception:  # pylint: disable=broad-except
        with contextlib.suppress(Exception):
            url_slug = raw_game_data["catalogNs"]["mappings"][0]["pageSlug"]

    if not url_slug:
        url_slug = raw_game_data["urlSlug"]

    return f"https://store.epicgames.com/{language}/{url_bundle_or_product}/{url_slug}"
Return if the game is free or will be free.
def is_free_game(game: dict[str, Any]) -> bool:
    """Return if the game is free or will be free."""
    return (
        # Current free game(s)
        game["promotions"]["promotionalOffers"]
        and game["promotions"]["promotionalOffers"][0]["promotionalOffers"][0][
            "discountSetting"
        ]["discountPercentage"]
        == 0
        and
        # Checking current price, maybe not necessary
        game["price"]["totalPrice"]["discountPrice"] == 0
    ) or (
        # Upcoming free game(s)
        game["promotions"]["upcomingPromotionalOffers"]
        and game["promotions"]["upcomingPromotionalOffers"][0]["promotionalOffers"][0][
            "discountSetting"
        ]["discountPercentage"]
        == 0
    )
Return whether or not given value is a valid MAC address.
def validate_mac(mac: str) -> bool:
    """Return whether or not given value is a valid MAC address."""
    return bool(
        mac
        and len(mac) == 17
        and mac.count(":") == 5
        and all(int(part, 16) < 256 for part in mac.split(":") if part)
    )
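A minimal usage sketch of the validator above; the sample addresses are illustrative, and the parts must be hexadecimal because a non-hex part would raise inside int(part, 16).

# Illustrative checks for validate_mac (not from the source).
assert validate_mac("aa:bb:cc:dd:ee:ff") is True
assert validate_mac("aa:bb:cc:dd:ee") is False  # too short to be a MAC address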
Cancel all the callbacks on unload.
def _async_unload(unload_callbacks: list[CALLBACK_TYPE]) -> None:
    """Cancel all the callbacks on unload."""
    for callback in unload_callbacks:
        callback()
Connect scanner.
def async_connect_scanner( hass: HomeAssistant, entry_data: RuntimeEntryData, cli: APIClient, device_info: DeviceInfo, cache: ESPHomeBluetoothCache, ) -> CALLBACK_TYPE: """Connect scanner.""" client_data = connect_scanner(cli, device_info, cache, entry_data.available) entry_data.bluetooth_device = client_data.bluetooth_device client_data.disconnect_callbacks = entry_data.disconnect_callbacks scanner = client_data.scanner if TYPE_CHECKING: assert scanner is not None return partial( _async_unload, [ async_register_scanner(hass, scanner), scanner.async_setup(), ], )
Get an instance of the dashboard if set. This is only safe to call after `async_setup` has been completed. It should not be called from the config flow because there is a race where manager can be an asyncio.Event instead of the actual manager because the singleton decorator is not yet done.
def async_get_dashboard(hass: HomeAssistant) -> ESPHomeDashboard | None:
    """Get an instance of the dashboard if set.

    This is only safe to call after `async_setup` has been completed.

    It should not be called from the config flow because there is a race
    where manager can be an asyncio.Event instead of the actual manager
    because the singleton decorator is not yet done.
    """
    manager: ESPHomeDashboardManager | None = hass.data.get(KEY_DASHBOARD_MANAGER)
    return manager.async_get() if manager else None
Update entities of this platform when entities are listed.
def async_static_info_updated( hass: HomeAssistant, entry_data: RuntimeEntryData, platform: entity_platform.EntityPlatform, async_add_entities: AddEntitiesCallback, info_type: type[_InfoT], entity_type: type[_EntityT], state_type: type[_StateT], infos: list[EntityInfo], ) -> None: """Update entities of this platform when entities are listed.""" current_infos = entry_data.info[info_type] new_infos: dict[int, EntityInfo] = {} add_entities: list[_EntityT] = [] for info in infos: if not current_infos.pop(info.key, None): # Create new entity entity = entity_type(entry_data, platform.domain, info, state_type) add_entities.append(entity) new_infos[info.key] = info # Anything still in current_infos is now gone if current_infos: device_info = entry_data.device_info if TYPE_CHECKING: assert device_info is not None entry_data.async_remove_entities( hass, current_infos.values(), device_info.mac_address ) # Then update the actual info entry_data.info[info_type] = new_infos if new_infos: entry_data.async_update_entity_infos(new_infos.values()) if add_entities: # Add entities to Home Assistant async_add_entities(add_entities)
Wrap a state property of an esphome entity. This checks if the state object in the entity is set, and prevents writing NAN values to the Home Assistant state machine.
def esphome_state_property(
    func: Callable[[_EntityT], _R],
) -> Callable[[_EntityT], _R | None]:
    """Wrap a state property of an esphome entity.

    This checks if the state object in the entity is set, and prevents
    writing NAN values to the Home Assistant state machine.
    """

    @functools.wraps(func)
    def _wrapper(self: _EntityT) -> _R | None:
        # pylint: disable-next=protected-access
        if not self._has_state:
            return None
        val = func(self)
        if isinstance(val, float) and not math.isfinite(val):
            # Home Assistant doesn't use NaN or inf values in state machine
            # (not JSON serializable)
            return None
        return val

    return _wrapper
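A hedged usage sketch: the decorator is applied to an entity property. The class name, base class, and `_state` attribute below are assumptions made for illustration; only the `_has_state` flag is taken from the decorator itself.

class ExampleEsphomeSensor(EsphomeEntity):  # hypothetical subclass, for illustration only
    @property
    @esphome_state_property
    def native_value(self) -> float | None:
        # Returns None automatically when no state has been received
        # or when the value is NaN/inf.
        return self._state.state  # assumed state attribute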
Decorate ESPHome command calls that send commands/make changes to the device. A decorator that wraps the passed in function, catches APIConnectionError errors, and raises a HomeAssistant error instead.
def convert_api_error_ha_error(
    func: Callable[Concatenate[_EntityT, _P], Awaitable[None]],
) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]:
    """Decorate ESPHome command calls that send commands/make changes to the device.

    A decorator that wraps the passed in function, catches APIConnectionError errors,
    and raises a HomeAssistant error instead.
    """

    async def handler(self: _EntityT, *args: _P.args, **kwargs: _P.kwargs) -> None:
        try:
            return await func(self, *args, **kwargs)
        except APIConnectionError as error:
            raise HomeAssistantError(
                f"Error communicating with device: {error}"
            ) from error

    return handler
Convert absolute mired shift to degrees kelvin. This function rounds the converted value instead of flooring the value as is done in homeassistant.util.color.color_temperature_mired_to_kelvin(). If the value of mired_temperature is less than or equal to zero, return the original value to avoid a divide by zero.
def _mired_to_kelvin(mired_temperature: float) -> int:
    """Convert absolute mired shift to degrees kelvin.

    This function rounds the converted value instead of flooring the value as
    is done in homeassistant.util.color.color_temperature_mired_to_kelvin().

    If the value of mired_temperature is less than or equal to zero, return
    the original value to avoid a divide by zero.
    """
    if mired_temperature <= 0:
        return round(mired_temperature)
    return round(1000000 / mired_temperature)
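A couple of worked conversions, assuming the function exactly as written above:

# 153 mireds rounds to 6536 K (1_000_000 / 153 ≈ 6535.95), 500 mireds is 2000 K,
# and non-positive inputs are returned unchanged to avoid division by zero.
assert _mired_to_kelvin(153) == 6536
assert _mired_to_kelvin(500) == 2000
assert _mired_to_kelvin(0) == 0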
Convert an esphome color mode to a HA color mode constant. Chooses the color mode that best matches the feature-set.
def _color_mode_to_ha(mode: int) -> str:
    """Convert an esphome color mode to a HA color mode constant.

    Chooses the color mode that best matches the feature-set.
    """
    candidates = []
    for ha_mode, cap_lists in _COLOR_MODE_MAPPING.items():
        for caps in cap_lists:
            if caps == mode:
                # exact match
                return ha_mode
            if (mode & caps) == caps:
                # all requirements met
                candidates.append((ha_mode, caps))

    if not candidates:
        return ColorMode.UNKNOWN

    # choose the color mode with the most bits set
    candidates.sort(key=lambda key: bin(key[1]).count("1"))
    return candidates[-1][0]
Filter the given supported color modes. Excluding all values that don't have the requested features.
def _filter_color_modes(
    supported: list[int], features: LightColorCapability
) -> tuple[int, ...]:
    """Filter the given supported color modes.

    Excluding all values that don't have the requested features.
    """
    features_value = features.value
    return tuple(
        mode for mode in supported if (mode & features_value) == features_value
    )
Return the color mode with the least complexity.
def _least_complex_color_mode(color_modes: tuple[int, ...]) -> int:
    """Return the color mode with the least complexity."""
    # popcount with bin() function because it appears
    # to be the best way: https://stackoverflow.com/a/9831671
    color_modes_list = list(color_modes)
    color_modes_list.sort(key=lambda mode: bin(mode).count("1"))
    return color_modes_list[0]
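A small illustrative example with plain integer bitmasks (the values are made up; real inputs are ESPHome capability masks):

# The mode with the fewest capability bits set wins.
assert _least_complex_color_mode((0b111, 0b001, 0b011)) == 0b001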
Create or delete the ble_firmware_outdated issue.
def _async_check_firmware_version(
    hass: HomeAssistant, device_info: EsphomeDeviceInfo, api_version: APIVersion
) -> None:
    """Create or delete the ble_firmware_outdated issue."""
    # ESPHome device_info.mac_address is the unique_id
    issue = f"ble_firmware_outdated-{device_info.mac_address}"
    if (
        not device_info.bluetooth_proxy_feature_flags_compat(api_version)
        # If the device has a project name its up to that project
        # to tell them about the firmware version update so we don't notify here
        or (device_info.project_name and device_info.project_name not in PROJECT_URLS)
        or AwesomeVersion(device_info.esphome_version) >= STABLE_BLE_VERSION
    ):
        async_delete_issue(hass, DOMAIN, issue)
        return
    async_create_issue(
        hass,
        DOMAIN,
        issue,
        is_fixable=False,
        severity=IssueSeverity.WARNING,
        learn_more_url=PROJECT_URLS.get(device_info.project_name, DEFAULT_URL),
        translation_key="ble_firmware_outdated",
        translation_placeholders={
            "name": device_info.name,
            "version": STABLE_BLE_VERSION_STR,
        },
    )
Create or delete the api_password_deprecated issue.
def _async_check_using_api_password(
    hass: HomeAssistant, device_info: EsphomeDeviceInfo, has_password: bool
) -> None:
    """Create or delete the api_password_deprecated issue."""
    # ESPHome device_info.mac_address is the unique_id
    issue = f"api_password_deprecated-{device_info.mac_address}"
    if not has_password:
        async_delete_issue(hass, DOMAIN, issue)
        return
    async_create_issue(
        hass,
        DOMAIN,
        issue,
        is_fixable=False,
        severity=IssueSeverity.WARNING,
        learn_more_url="https://esphome.io/components/api.html",
        translation_key="api_password_deprecated",
        translation_placeholders={
            "name": device_info.name,
        },
    )
Set up device registry feature for a particular config entry.
def _async_setup_device_registry( hass: HomeAssistant, entry: ConfigEntry, entry_data: RuntimeEntryData ) -> str: """Set up device registry feature for a particular config entry.""" device_info = entry_data.device_info if TYPE_CHECKING: assert device_info is not None sw_version = device_info.esphome_version if device_info.compilation_time: sw_version += f" ({device_info.compilation_time})" configuration_url = None if device_info.webserver_port > 0: configuration_url = f"http://{entry.data['host']}:{device_info.webserver_port}" elif dashboard := async_get_dashboard(hass): configuration_url = f"homeassistant://hassio/ingress/{dashboard.addon_slug}" manufacturer = "espressif" if device_info.manufacturer: manufacturer = device_info.manufacturer model = device_info.model hw_version = None if device_info.project_name: project_name = device_info.project_name.split(".") manufacturer = project_name[0] model = project_name[1] hw_version = device_info.project_version suggested_area = None if device_info.suggested_area: suggested_area = device_info.suggested_area device_registry = dr.async_get(hass) device_entry = device_registry.async_get_or_create( config_entry_id=entry.entry_id, configuration_url=configuration_url, connections={(dr.CONNECTION_NETWORK_MAC, device_info.mac_address)}, name=entry_data.friendly_name, manufacturer=manufacturer, model=model, sw_version=sw_version, hw_version=hw_version, suggested_area=suggested_area, ) return device_entry.id
Execute a service on a node.
def execute_service(
    entry_data: RuntimeEntryData, service: UserService, call: ServiceCall
) -> None:
    """Execute a service on a node."""
    entry_data.client.execute_service(service, call.data)
Build a service name for a node.
def build_service_name(device_info: EsphomeDeviceInfo, service: UserService) -> str:
    """Build a service name for a node."""
    return f"{device_info.name.replace('-', '_')}_{service.name}"
Register a service on a node.
def _async_register_service( hass: HomeAssistant, entry_data: RuntimeEntryData, device_info: EsphomeDeviceInfo, service: UserService, ) -> None: """Register a service on a node.""" service_name = build_service_name(device_info, service) schema = {} fields = {} for arg in service.args: if arg.type not in ARG_TYPE_METADATA: _LOGGER.error( "Can't register service %s because %s is of unknown type %s", service_name, arg.name, arg.type, ) return metadata = ARG_TYPE_METADATA[arg.type] schema[vol.Required(arg.name)] = metadata.validator fields[arg.name] = { "name": arg.name, "required": True, "description": metadata.description, "example": metadata.example, "selector": metadata.selector, } hass.services.async_register( DOMAIN, service_name, partial(execute_service, entry_data, service), vol.Schema(schema), ) async_set_service_schema( hass, DOMAIN, service_name, { "description": ( f"Calls the service {service.name} of the node {device_info.name}" ), "fields": fields, }, )
Set up the Etherscan.io sensors.
def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Etherscan.io sensors.""" address = config.get(CONF_ADDRESS) name = config.get(CONF_NAME) token = config.get(CONF_TOKEN) token_address = config.get(CONF_TOKEN_ADDRESS) if token: token = token.upper() if not name: name = f"{token} Balance" if not name: name = "ETH Balance" add_entities([EtherscanSensor(name, address, token, token_address)], True)
Set up EufyHome bulbs.
def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up EufyHome bulbs.""" if discovery_info is None: return add_entities([EufyHomeLight(discovery_info)], True)
Set up EufyHome switches.
def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up EufyHome switches.""" if discovery_info is None: return add_entities([EufyHomeSwitch(discovery_info)], True)
Set up EufyHome devices.
def setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up EufyHome devices.""" if CONF_USERNAME in config[DOMAIN] and CONF_PASSWORD in config[DOMAIN]: data = lakeside.get_devices( config[DOMAIN][CONF_USERNAME], config[DOMAIN][CONF_PASSWORD] ) for device in data: kind = device["type"] if kind not in PLATFORMS: continue discovery.load_platform(hass, PLATFORMS[kind], DOMAIN, device, config) for device_info in config[DOMAIN][CONF_DEVICES]: kind = device_info["type"] if kind not in PLATFORMS: continue device = {} device["address"] = device_info["address"] device["code"] = device_info["access_token"] device["type"] = device_info["type"] device["name"] = device_info["name"] discovery.load_platform(hass, PLATFORMS[kind], DOMAIN, device, config) return True
Return a RGB color as an integer.
def color_rgb_to_int(red: int, green: int, blue: int) -> int:
    """Return a RGB color as an integer."""
    return red * 256 * 256 + green * 256 + blue
Return an RGB tuple from an integer.
def color_int_to_rgb(value: int) -> tuple[int, int, int]:
    """Return an RGB tuple from an integer."""
    return (value >> 16, (value >> 8) & 0xFF, value & 0xFF)
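A quick round-trip sketch for the two color helpers above (values chosen for illustration):

# 0xFF8000 encodes (255, 128, 0); the two helpers invert each other.
assert color_rgb_to_int(255, 128, 0) == 0xFF8000
assert color_int_to_rgb(0xFF8000) == (255, 128, 0)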
Decorate function to trigger update when function is done.
def update_when_done(
    func: Callable[Concatenate[_EvilGeniusEntityT, _P], Awaitable[_R]],
) -> Callable[Concatenate[_EvilGeniusEntityT, _P], Coroutine[Any, Any, _R]]:
    """Decorate function to trigger update when function is done."""

    @wraps(func)
    async def wrapper(
        self: _EvilGeniusEntityT, *args: _P.args, **kwargs: _P.kwargs
    ) -> _R:
        """Wrap function."""
        result = await func(self, *args, **kwargs)
        await self.coordinator.async_request_refresh()
        return result

    return wrapper
Reformat a dt str from "%Y-%m-%dT%H:%M:%SZ" as local/aware/isoformat.
def convert_until(status_dict: dict, until_key: str) -> None:
    """Reformat a dt str from "%Y-%m-%dT%H:%M:%SZ" as local/aware/isoformat."""
    if until_key in status_dict and (  # only present for certain modes
        dt_utc_naive := dt_util.parse_datetime(status_dict[until_key])
    ):
        status_dict[until_key] = dt_util.as_local(dt_utc_naive).isoformat()
Recursively convert a dict's keys to snake_case.
def convert_dict(dictionary: dict[str, Any]) -> dict[str, Any]:
    """Recursively convert a dict's keys to snake_case."""

    def convert_key(key: str) -> str:
        """Convert a string to snake_case."""
        string = re.sub(r"[\-\.\s]", "_", str(key))
        return (string[0]).lower() + re.sub(
            r"[A-Z]",
            lambda matched: f"_{matched.group(0).lower()}",  # type:ignore[str-bytes-safe]
            string[1:],
        )

    return {
        (convert_key(k) if isinstance(k, str) else k): (
            convert_dict(v) if isinstance(v, dict) else v
        )
        for k, v in dictionary.items()
    }
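An illustrative input/output pair for the key conversion (the data is made up):

# camelCase keys become snake_case, recursively for nested dicts.
assert convert_dict({"untilTime": {"someValue": 1}}) == {"until_time": {"some_value": 1}}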
Return False if the exception can't be ignored.
def _handle_exception(err: evo.RequestFailed) -> None: """Return False if the exception can't be ignored.""" try: raise err except evo.AuthenticationFailed: _LOGGER.error( ( "Failed to authenticate with the vendor's server. Check your username" " and password. NB: Some special password characters that work" " correctly via the website will not work via the web API. Message" " is: %s" ), err, ) except evo.RequestFailed: if err.status is None: _LOGGER.warning( ( "Unable to connect with the vendor's server. " "Check your network and the vendor's service status page. " "Message is: %s" ), err, ) elif err.status == HTTPStatus.SERVICE_UNAVAILABLE: _LOGGER.warning( "The vendor says their server is currently unavailable. " "Check the vendor's service status page" ) elif err.status == HTTPStatus.TOO_MANY_REQUESTS: _LOGGER.warning( ( "The vendor's API rate limit has been exceeded. " "If this message persists, consider increasing the %s" ), CONF_SCAN_INTERVAL, ) else: raise
Set up the service handlers for the system/zone operating modes. Not all Honeywell TCC-compatible systems support all operating modes. In addition, each mode will require any of four distinct service schemas. This has to be enumerated before registering the appropriate handlers. It appears that all TCC-compatible systems support the same three zones modes.
def setup_service_functions(hass: HomeAssistant, broker: EvoBroker) -> None: """Set up the service handlers for the system/zone operating modes. Not all Honeywell TCC-compatible systems support all operating modes. In addition, each mode will require any of four distinct service schemas. This has to be enumerated before registering the appropriate handlers. It appears that all TCC-compatible systems support the same three zones modes. """ @verify_domain_control(hass, DOMAIN) async def force_refresh(call: ServiceCall) -> None: """Obtain the latest state data via the vendor's RESTful API.""" await broker.async_update() @verify_domain_control(hass, DOMAIN) async def set_system_mode(call: ServiceCall) -> None: """Set the system mode.""" payload = { "unique_id": broker.tcs.systemId, "service": call.service, "data": call.data, } async_dispatcher_send(hass, DOMAIN, payload) @verify_domain_control(hass, DOMAIN) async def set_zone_override(call: ServiceCall) -> None: """Set the zone override (setpoint).""" entity_id = call.data[ATTR_ENTITY_ID] registry = er.async_get(hass) registry_entry = registry.async_get(entity_id) if registry_entry is None or registry_entry.platform != DOMAIN: raise ValueError(f"'{entity_id}' is not a known {DOMAIN} entity") if registry_entry.domain != "climate": raise ValueError(f"'{entity_id}' is not an {DOMAIN} controller/zone") payload = { "unique_id": registry_entry.unique_id, "service": call.service, "data": call.data, } async_dispatcher_send(hass, DOMAIN, payload) hass.services.async_register(DOMAIN, SVC_REFRESH_SYSTEM, force_refresh) # Enumerate which operating modes are supported by this system modes = broker.config[SZ_ALLOWED_SYSTEM_MODES] # Not all systems support "AutoWithReset": register this handler only if required if [m[SZ_SYSTEM_MODE] for m in modes if m[SZ_SYSTEM_MODE] == SZ_AUTO_WITH_RESET]: hass.services.async_register(DOMAIN, SVC_RESET_SYSTEM, set_system_mode) system_mode_schemas = [] modes = [m for m in modes if m[SZ_SYSTEM_MODE] != SZ_AUTO_WITH_RESET] # Permanent-only modes will use this schema perm_modes = [m[SZ_SYSTEM_MODE] for m in modes if not m[SZ_CAN_BE_TEMPORARY]] if perm_modes: # any of: "Auto", "HeatingOff": permanent only schema = vol.Schema({vol.Required(ATTR_SYSTEM_MODE): vol.In(perm_modes)}) system_mode_schemas.append(schema) modes = [m for m in modes if m[SZ_CAN_BE_TEMPORARY]] # These modes are set for a number of hours (or indefinitely): use this schema temp_modes = [m[SZ_SYSTEM_MODE] for m in modes if m[SZ_TIMING_MODE] == "Duration"] if temp_modes: # any of: "AutoWithEco", permanent or for 0-24 hours schema = vol.Schema( { vol.Required(ATTR_SYSTEM_MODE): vol.In(temp_modes), vol.Optional(ATTR_DURATION_HOURS): vol.All( cv.time_period, vol.Range(min=timedelta(hours=0), max=timedelta(hours=24)), ), } ) system_mode_schemas.append(schema) # These modes are set for a number of days (or indefinitely): use this schema temp_modes = [m[SZ_SYSTEM_MODE] for m in modes if m[SZ_TIMING_MODE] == "Period"] if temp_modes: # any of: "Away", "Custom", "DayOff", permanent or for 1-99 days schema = vol.Schema( { vol.Required(ATTR_SYSTEM_MODE): vol.In(temp_modes), vol.Optional(ATTR_DURATION_DAYS): vol.All( cv.time_period, vol.Range(min=timedelta(days=1), max=timedelta(days=99)), ), } ) system_mode_schemas.append(schema) if system_mode_schemas: hass.services.async_register( DOMAIN, SVC_SET_SYSTEM_MODE, set_system_mode, schema=vol.Schema(vol.Any(*system_mode_schemas)), ) # The zone modes are consistent across all systems and use the same schema 
hass.services.async_register( DOMAIN, SVC_RESET_ZONE_OVERRIDE, set_zone_override, schema=RESET_ZONE_OVERRIDE_SCHEMA, ) hass.services.async_register( DOMAIN, SVC_SET_ZONE_OVERRIDE, set_zone_override, schema=SET_ZONE_OVERRIDE_SCHEMA, )
Try to login to EZVIZ cloud account and return token.
def _validate_and_create_auth(data: dict) -> dict[str, Any]:
    """Try to login to EZVIZ cloud account and return token."""
    # Verify cloud credentials by attempting a login request with username and password.
    # Return login token.
    ezviz_client = EzvizClient(
        data[CONF_USERNAME],
        data[CONF_PASSWORD],
        data[CONF_URL],
        data.get(CONF_TIMEOUT, DEFAULT_TIMEOUT),
    )
    ezviz_token = ezviz_client.login()

    return {
        CONF_SESSION_ID: ezviz_token[CONF_SESSION_ID],
        CONF_RFSESSION_ID: ezviz_token[CONF_RFSESSION_ID],
        CONF_URL: ezviz_token["api_url"],
        CONF_TYPE: ATTR_TYPE_CLOUD,
    }
Try DESCRIBE on RTSP camera with credentials.
def _test_camera_rtsp_creds(data: dict) -> None:
    """Try DESCRIBE on RTSP camera with credentials."""
    test_rtsp = TestRTSPAuth(
        data[CONF_IP_ADDRESS], data[CONF_USERNAME], data[CONF_PASSWORD]
    )
    test_rtsp.main()
Get the Facebook notification service.
def get_service(
    hass: HomeAssistant,
    config: ConfigType,
    discovery_info: DiscoveryInfoType | None = None,
) -> FacebookNotificationService:
    """Get the Facebook notification service."""
    return FacebookNotificationService(config[CONF_PAGE_ACCESS_TOKEN])
Log error message.
def log_error(response):
    """Log error message."""
    obj = response.json()
    error_message = obj["error"]["message"]
    error_code = obj["error"]["code"]

    _LOGGER.error(
        "Error %s : %s (Code %s)", response.status_code, error_message, error_code
    )
Create a function to test a device condition.
def async_condition_from_config(
    hass: HomeAssistant, config: ConfigType
) -> condition.ConditionCheckerType:
    """Create a function to test a device condition."""
    registry = er.async_get(hass)
    entity_id = er.async_resolve_entity_id(registry, config[ATTR_ENTITY_ID])
    if config[CONF_TYPE] == "is_on":
        state = STATE_ON
    else:
        state = STATE_OFF

    @callback
    def test_is_state(hass: HomeAssistant, variables: TemplateVarsType) -> bool:
        """Test if an entity is a certain state."""
        return condition.state(hass, entity_id, state)

    return test_is_state
Test if state significantly changed.
def async_check_significant_change( hass: HomeAssistant, old_state: str, old_attrs: dict, new_state: str, new_attrs: dict, **kwargs: Any, ) -> bool | None: """Test if state significantly changed.""" if old_state != new_state: return True old_attrs_s = set( {k: v for k, v in old_attrs.items() if k in SIGNIFICANT_ATTRIBUTES}.items() ) new_attrs_s = set( {k: v for k, v in new_attrs.items() if k in SIGNIFICANT_ATTRIBUTES}.items() ) changed_attrs: set[str] = {item[0] for item in old_attrs_s ^ new_attrs_s} for attr_name in changed_attrs: if attr_name != ATTR_PERCENTAGE: return True old_attr_value = old_attrs.get(attr_name) new_attr_value = new_attrs.get(attr_name) if new_attr_value is None or not check_valid_float(new_attr_value): # New attribute value is invalid, ignore it continue if old_attr_value is None or not check_valid_float(old_attr_value): # Old attribute value was invalid, we should report again return True if check_absolute_change(old_attr_value, new_attr_value, 1.0): return True # no significant attribute change detected return False
Return if the fans are on based on the statemachine.
def is_on(hass: HomeAssistant, entity_id: str) -> bool:
    """Return if the fans are on based on the statemachine."""
    entity = hass.states.get(entity_id)
    assert entity
    return entity.state == STATE_ON
Set up the service for the Fastdotcom integration.
def async_setup_services(hass: HomeAssistant) -> None: """Set up the service for the Fastdotcom integration.""" @callback def collect_coordinator() -> FastdotcomDataUpdateCoordinator: """Collect the coordinator Fastdotcom.""" config_entries = hass.config_entries.async_entries(DOMAIN) if not config_entries: raise HomeAssistantError("No Fast.com config entries found") for config_entry in config_entries: if config_entry.state != ConfigEntryState.LOADED: raise HomeAssistantError(f"{config_entry.title} is not loaded") coordinator: FastdotcomDataUpdateCoordinator = hass.data[DOMAIN][ config_entry.entry_id ] break return coordinator async def async_perform_service(call: ServiceCall) -> None: """Perform a service call to manually run Fastdotcom.""" ir.async_create_issue( hass, DOMAIN, "service_deprecation", breaks_in_ha_version="2024.7.0", is_fixable=True, is_persistent=True, severity=ir.IssueSeverity.WARNING, translation_key="service_deprecation", ) coordinator = collect_coordinator() await coordinator.async_request_refresh() hass.services.async_register( DOMAIN, SERVICE_NAME, async_perform_service, )
Return the FFmpegManager.
def get_ffmpeg_manager(hass: HomeAssistant) -> FFmpegManager:
    """Return the FFmpegManager."""
    if DATA_FFMPEG not in hass.data:
        raise ValueError("ffmpeg component not initialized")
    return hass.data[DATA_FFMPEG]
Try to fix errors in the entered url. We know that the url should be in the format http://<HOST>/api/
def _normalize_url(url: str) -> str:
    """Try to fix errors in the entered url.

    We know that the url should be in the format http://<HOST>/api/
    """
    if url.endswith("/api"):
        return f"{url}/"
    if not url.endswith("/api/"):
        return f"{url}api/" if url.endswith("/") else f"{url}/api/"
    return url
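A few illustrative inputs and their normalized results (hostnames are placeholders):

assert _normalize_url("http://host/api") == "http://host/api/"
assert _normalize_url("http://host/") == "http://host/api/"
assert _normalize_url("http://host") == "http://host/api/"
assert _normalize_url("http://host/api/") == "http://host/api/"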
Scale the input value from 0-100 to 0-255.
def scaleto255(value: int | None) -> int:
    """Scale the input value from 0-100 to 0-255."""
    if value is None:
        return 0
    # Fibaro has a funny way of storing brightness either 0-100 or 0-99
    # depending on device type (e.g. dimmer vs led)
    if value > 98:
        value = 100
    return round(value * 2.55)
Scale the input value from 0-255 to 0-99.
def scaleto99(value: int | None) -> int:
    """Scale the input value from 0-255 to 0-99."""
    if value is None:
        return 0
    # Make sure a low but non-zero value is not rounded down to zero
    if 0 < value < 3:
        return 1
    return min(round(value / 2.55), 99)
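A few illustrative conversions for the two scaling helpers above (values chosen to avoid floating-point rounding edge cases):

assert scaleto255(None) == 0
assert scaleto255(100) == 255  # 99 and 100 both map to full brightness
assert scaleto99(255) == 99
assert scaleto99(1) == 1       # low but non-zero stays non-zero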
Validate the user input allows us to connect to fibaro.
def init_controller(data: Mapping[str, Any]) -> FibaroController:
    """Validate the user input allows us to connect to fibaro."""
    controller = FibaroController(data)
    controller.connect_with_error_handling()
    return controller
Get the file notification service.
def get_service(
    hass: HomeAssistant,
    config: ConfigType,
    discovery_info: DiscoveryInfoType | None = None,
) -> FileNotificationService:
    """Get the file notification service."""
    filename: str = config[CONF_FILENAME]
    timestamp: bool = config[CONF_TIMESTAMP]

    return FileNotificationService(filename, timestamp)
Validate path.
def validate_path(hass: HomeAssistant, path: str) -> str:
    """Validate path."""
    get_path = pathlib.Path(path)
    if not get_path.exists() or not get_path.is_file():
        _LOGGER.error("Can not access file %s", path)
        raise NotValidError

    if not hass.config.is_allowed_path(path):
        _LOGGER.error("Filepath %s is not allowed", path)
        raise NotAllowedError

    full_path = get_path.absolute()

    return str(full_path)
Get an uploaded file. File is removed at the end of the context.
def process_uploaded_file(hass: HomeAssistant, file_id: str) -> Iterator[Path]:
    """Get an uploaded file.

    File is removed at the end of the context.
    """
    if DOMAIN not in hass.data:
        raise ValueError("File does not exist")

    file_upload_data: FileUploadData = hass.data[DOMAIN]

    if not file_upload_data.has_file(file_id):
        raise ValueError("File does not exist")

    try:
        yield file_upload_data.file_path(file_id)
    finally:
        file_upload_data.files.pop(file_id)
        shutil.rmtree(file_upload_data.file_dir(file_id))
Set up the sensors. Login to the bank and get a list of existing accounts. Create a sensor for each account.
def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the sensors. Login to the bank and get a list of existing accounts. Create a sensor for each account. """ credentials = BankCredentials( config[CONF_BIN], config[CONF_USERNAME], config[CONF_PIN], config[CONF_URL] ) fints_name = config.get(CONF_NAME, config[CONF_BIN]) account_config = { acc[CONF_ACCOUNT]: acc[CONF_NAME] for acc in config[CONF_ACCOUNTS] } holdings_config = { acc[CONF_ACCOUNT]: acc[CONF_NAME] for acc in config[CONF_HOLDINGS] } client = FinTsClient(credentials, fints_name, account_config, holdings_config) balance_accounts, holdings_accounts = client.detect_accounts() accounts: list[SensorEntity] = [] for account in balance_accounts: if config[CONF_ACCOUNTS] and account.iban not in account_config: _LOGGER.info("Skipping account %s for bank %s", account.iban, fints_name) continue if not (account_name := account_config.get(account.iban)): account_name = f"{fints_name} - {account.iban}" accounts.append(FinTsAccount(client, account, account_name)) _LOGGER.debug("Creating account %s for bank %s", account.iban, fints_name) for account in holdings_accounts: if config[CONF_HOLDINGS] and account.accountnumber not in holdings_config: _LOGGER.info( "Skipping holdings %s for bank %s", account.accountnumber, fints_name ) continue account_name = holdings_config.get(account.accountnumber) if not account_name: account_name = f"{fints_name} - {account.accountnumber}" accounts.append(FinTsHoldingsAccount(client, account, account_name)) _LOGGER.debug( "Creating holdings %s for bank %s", account.accountnumber, fints_name ) add_entities(accounts, True)
Parse the integration config entry into a FitbitConfig.
def config_from_entry_data(data: Mapping[str, Any]) -> FitbitConfig:
    """Parse the integration config entry into a FitbitConfig."""
    clock_format = data.get(CONF_CLOCK_FORMAT)

    # Originally entities were configured explicitly from yaml config. Newer
    # configurations will infer which entities to enable based on the allowed
    # scopes the user selected during OAuth. When creating entities based on
    # scopes, some entities are disabled by default.
    monitored_resources = data.get(CONF_MONITORED_RESOURCES)
    fitbit_scopes: set[FitbitScope] = set({})
    if scopes := data["token"].get("scope"):
        fitbit_scopes = set({FitbitScope(scope) for scope in scopes.split(" ")})
    return FitbitConfig(clock_format, monitored_resources, fitbit_scopes)
Parse a Fitbit timeseries API response.
def _default_value_fn(result: dict[str, Any]) -> str:
    """Parse a Fitbit timeseries API response."""
    return cast(str, result["value"])
Format function for distance values.
def _distance_value_fn(result: dict[str, Any]) -> int | str:
    """Format function for distance values."""
    return format(float(_default_value_fn(result)), ".2f")
Format function for body values.
def _body_value_fn(result: dict[str, Any]) -> int | str:
    """Format function for body values."""
    return format(float(_default_value_fn(result)), ".1f")
Determine the weight unit.
def _weight_unit(unit_system: FitbitUnitSystem) -> UnitOfMass:
    """Determine the weight unit."""
    if unit_system == FitbitUnitSystem.EN_US:
        return UnitOfMass.POUNDS
    if unit_system == FitbitUnitSystem.EN_GB:
        return UnitOfMass.STONES
    return UnitOfMass.KILOGRAMS
Determine the distance unit.
def _distance_unit(unit_system: FitbitUnitSystem) -> UnitOfLength:
    """Determine the distance unit."""
    if unit_system == FitbitUnitSystem.EN_US:
        return UnitOfLength.MILES
    return UnitOfLength.KILOMETERS
Determine the elevation unit.
def _elevation_unit(unit_system: FitbitUnitSystem) -> UnitOfLength:
    """Determine the elevation unit."""
    if unit_system == FitbitUnitSystem.EN_US:
        return UnitOfLength.FEET
    return UnitOfLength.METERS
Determine the water unit.
def _water_unit(unit_system: FitbitUnitSystem) -> UnitOfVolume:
    """Determine the water unit."""
    if unit_system == FitbitUnitSystem.EN_US:
        return UnitOfVolume.FLUID_OUNCES
    return UnitOfVolume.MILLILITERS
Value function that will parse the specified field if present.
def _int_value_or_none(field: str) -> Callable[[dict[str, Any]], int | None]:
    """Value function that will parse the specified field if present."""

    def convert(result: dict[str, Any]) -> int | None:
        if (value := result["value"].get(field)) is not None:
            return int(value)
        return None

    return convert
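A short usage sketch with a made-up timeseries payload:

# Build a converter for a nested field, then apply it to an API-style result dict.
extract_minutes = _int_value_or_none("minutes")
assert extract_minutes({"value": {"minutes": "12"}}) == 12
assert extract_minutes({"value": {}}) is None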
Load existing valid fitbit.conf from disk for import.
def load_config_file(config_path: str) -> dict[str, Any] | None:
    """Load existing valid fitbit.conf from disk for import."""
    if os.path.isfile(config_path):
        config_file = load_json_object(config_path)
        if config_file != DEFAULT_CONFIG and all(
            key in config_file for key in FITBIT_CONF_KEYS
        ):
            return config_file
    return None
Set up the Fixer.io sensor.
def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Fixer.io sensor.""" api_key = config.get(CONF_API_KEY) name = config.get(CONF_NAME) target = config.get(CONF_TARGET) try: Fixerio(symbols=[target], access_key=api_key).latest() except FixerioException: _LOGGER.error("One of the given currencies is not supported") return data = ExchangeData(target, api_key) add_entities([ExchangeRateSensor(data, name, target)], True)
Set up a platform with added entities.
def async_setup_entry_platform( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback, constructor: Callable[[FjaraskupanCoordinator], list[Entity]], ) -> None: """Set up a platform with added entities.""" entry_state: EntryState = hass.data[DOMAIN][entry.entry_id] async_add_entities( entity for coordinator in entry_state.coordinators.values() for entity in constructor(coordinator) ) @callback def _detection(coordinator: FjaraskupanCoordinator) -> None: async_add_entities(constructor(coordinator)) entry.async_on_unload( async_dispatcher_connect( hass, f"{DISPATCH_DETECTION}.{entry.entry_id}", _detection ) )
Set up the DeviceScanner and check if login is valid.
def setup_scanner(
    hass: HomeAssistant,
    config: ConfigType,
    see: SeeCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> bool:
    """Set up the DeviceScanner and check if login is valid."""
    scanner = FleetGoDeviceScanner(config, see)

    if not scanner.login(hass):
        _LOGGER.error("FleetGO authentication failed")
        return False

    return True
Set up the flic platform.
def setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the flic platform.""" # Initialize flic client responsible for # connecting to buttons and retrieving events host = config.get(CONF_HOST) port = config.get(CONF_PORT) discovery = config.get(CONF_DISCOVERY) try: client = pyflic.FlicClient(host, port) except ConnectionRefusedError: _LOGGER.error("Failed to connect to flic server") return def new_button_callback(address): """Set up newly verified button as device in Home Assistant.""" setup_button(hass, config, add_entities, client, address) client.on_new_verified_button = new_button_callback if discovery: start_scanning(config, add_entities, client) hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, lambda event: client.close()) # Start the pyflic event handling thread threading.Thread(target=client.handle_events).start() def get_info_callback(items): """Add entities for already verified buttons.""" addresses = items["bd_addr_of_verified_buttons"] or [] for address in addresses: setup_button(hass, config, add_entities, client, address) # Get addresses of already verified buttons client.get_info(get_info_callback)
Start a new flic client for scanning and connecting to new buttons.
def start_scanning(config, add_entities, client):
    """Start a new flic client for scanning and connecting to new buttons."""
    scan_wizard = pyflic.ScanWizard()

    def scan_completed_callback(scan_wizard, result, address, name):
        """Restart scan wizard to constantly check for new buttons."""
        if result == pyflic.ScanWizardResult.WizardSuccess:
            _LOGGER.info("Found new button %s", address)
        elif result != pyflic.ScanWizardResult.WizardFailedTimeout:
            _LOGGER.warning(
                "Failed to connect to button %s. Reason: %s", address, result
            )

        # Restart scan wizard
        start_scanning(config, add_entities, client)

    scan_wizard.on_completed = scan_completed_callback
    client.add_scan_wizard(scan_wizard)
Set up a single button device.
def setup_button(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    client,
    address,
) -> None:
    """Set up a single button device."""
    timeout: int = config[CONF_TIMEOUT]
    ignored_click_types: list[str] | None = config.get(CONF_IGNORED_CLICK_TYPES)
    button = FlicButton(hass, client, address, timeout, ignored_click_types)
    _LOGGER.info("Connected to button %s", address)

    add_entities([button])
Validate in the executor.
def _validate_input(
    hass: HomeAssistant, data: dict[str, Any], clear_token_file: bool
) -> FlumeDeviceList:
    """Validate in the executor."""
    flume_token_full_path = hass.config.path(
        f"{BASE_TOKEN_FILENAME}-{data[CONF_USERNAME]}"
    )
    if clear_token_file and os.path.exists(flume_token_full_path):
        os.unlink(flume_token_full_path)

    return FlumeDeviceList(
        FlumeAuth(
            data[CONF_USERNAME],
            data[CONF_PASSWORD],
            data[CONF_CLIENT_ID],
            data[CONF_CLIENT_SECRET],
            flume_token_file=flume_token_full_path,
        )
    )
Return a list of Flume devices that have a valid location.
def get_valid_flume_devices(flume_devices: FlumeDeviceList) -> list[dict[str, Any]]:
    """Return a list of Flume devices that have a valid location."""
    return [
        device
        for device in flume_devices.device_list
        if KEY_DEVICE_LOCATION in device
        and KEY_DEVICE_LOCATION_NAME in device[KEY_DEVICE_LOCATION]
    ]
Config entry set up in executor.
def _setup_entry( hass: HomeAssistant, entry: ConfigEntry ) -> tuple[FlumeAuth, FlumeDeviceList, Session]: """Config entry set up in executor.""" config = entry.data username = config[CONF_USERNAME] password = config[CONF_PASSWORD] client_id = config[CONF_CLIENT_ID] client_secret = config[CONF_CLIENT_SECRET] flume_token_full_path = hass.config.path(f"{BASE_TOKEN_FILENAME}-{username}") http_session = Session() try: flume_auth = FlumeAuth( username, password, client_id, client_secret, flume_token_file=flume_token_full_path, http_session=http_session, ) flume_devices = FlumeDeviceList(flume_auth, http_session=http_session) except RequestException as ex: raise ConfigEntryNotReady from ex except Exception as ex: raise ConfigEntryAuthFailed from ex return flume_auth, flume_devices, http_session
Add the services for the flume integration.
def setup_service(hass: HomeAssistant) -> None: """Add the services for the flume integration.""" @callback def list_notifications(call: ServiceCall) -> ServiceResponse: """Return the user notifications.""" entry_id: str = call.data[CONF_CONFIG_ENTRY] entry: ConfigEntry | None = hass.config_entries.async_get_entry(entry_id) if not entry: raise ValueError(f"Invalid config entry: {entry_id}") if not (flume_domain_data := hass.data[DOMAIN].get(entry_id)): raise ValueError(f"Config entry not loaded: {entry_id}") return { "notifications": flume_domain_data[ FLUME_NOTIFICATIONS_COORDINATOR ].notifications } hass.services.async_register( DOMAIN, SERVICE_LIST_NOTIFICATIONS, list_notifications, schema=LIST_NOTIFICATIONS_SERVICE_SCHEMA, supports_response=SupportsResponse.ONLY, )
When discovery is unavailable, load it from the config entry.
def async_build_cached_discovery(entry: ConfigEntry) -> FluxLEDDiscovery:
    """When discovery is unavailable, load it from the config entry."""
    data = entry.data
    return FluxLEDDiscovery(
        ipaddr=data[CONF_HOST],
        model=data.get(CONF_MODEL),
        id=format_as_flux_mac(entry.unique_id),
        model_num=data.get(CONF_MODEL_NUM),
        version_num=data.get(CONF_MINOR_VERSION),
        firmware_date=None,
        model_info=data.get(CONF_MODEL_INFO),
        model_description=data.get(CONF_MODEL_DESCRIPTION),
        remote_access_enabled=data.get(CONF_REMOTE_ACCESS_ENABLED),
        remote_access_host=data.get(CONF_REMOTE_ACCESS_HOST),
        remote_access_port=data.get(CONF_REMOTE_ACCESS_PORT),
    )
Convert a flux_led discovery to a human readable name.
def async_name_from_discovery(
    device: FluxLEDDiscovery, model_num: int | None = None
) -> str:
    """Convert a flux_led discovery to a human readable name."""
    if (mac_address := device[ATTR_ID]) is None:
        return device[ATTR_IPADDR]
    short_mac = mac_address[-6:]
    if device[ATTR_MODEL_DESCRIPTION]:
        return f"{device[ATTR_MODEL_DESCRIPTION]} {short_mac}"
    if model_num is not None:
        return f"{get_model_description(model_num, None)} {short_mac}"
    return f"{device[ATTR_MODEL]} {short_mac}"