response | instruction |
---|---|
Register an ambiclimate implementation.
client_id: Client id.
client_secret: Client secret. | def register_flow_implementation(
hass: HomeAssistant, client_id: str, client_secret: str
) -> None:
"""Register a ambiclimate implementation.
client_id: Client id.
client_secret: Client secret.
"""
hass.data.setdefault(DATA_AMBICLIMATE_IMPL, {})
hass.data[DATA_AMBICLIMATE_IMPL] = {
CONF_CLIENT_ID: client_id,
CONF_CLIENT_SECRET: client_secret,
} |
Pick a station name.
Station names can be empty, in which case we construct the name from
the location and device type. | def get_station_name(station: dict[str, Any]) -> str:
"""Pick a station name.
Station names can be empty, in which case we construct the name from
the location and device type.
"""
if name := station.get(API_STATION_INFO, {}).get(API_STATION_NAME):
return str(name)
location = (
station.get(API_STATION_INFO, {})
.get(API_STATION_COORDS, {})
.get(API_STATION_LOCATION)
)
station_type = station.get(API_LAST_DATA, {}).get(API_STATION_TYPE)
return f"{location}{'' if location is None or station_type is None else ' '}{station_type}" |
Calculate illuminance (in lux). | def async_wm2_to_lx(value: float) -> int:
"""Calculate illuminance (in lux)."""
return round(value / 0.0079) |
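Dividing by 0.0079 multiplies the irradiance reading by roughly 127, an approximate luminous-efficacy factor for converting solar radiation (W/m²) to illuminance; a quick worked example:
# async_wm2_to_lx(100.0) == round(100.0 / 0.0079) == 12658  # lux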
Hydrate station data with additional or normalized data. | def async_hydrate_station_data(data: dict[str, Any]) -> dict[str, Any]:
"""Hydrate station data with additional or normalized data."""
if (irradiation := data.get(TYPE_SOLARRADIATION)) is not None:
data[TYPE_SOLARRADIATION_LX] = async_wm2_to_lx(irradiation)
return data |
Validate binary sensor configurations. | def check_binary_sensors(value: list[str]) -> list[str]:
"""Validate binary sensor configurations."""
for exclusive_options in _EXCLUSIVE_OPTIONS:
if len(set(value) & exclusive_options) > 1:
raise vol.Invalid(
f"must contain at most one of {', '.join(exclusive_options)}."
)
return value |
Encode signal. | def service_signal(service: str, *args: str) -> str:
"""Encode signal."""
return "_".join([DOMAIN, service, *args]) |
Log an update error. | def log_update_error(
logger: logging.Logger,
action: str,
name: str | UndefinedType | None,
entity_type: str,
error: Exception,
level: int = logging.ERROR,
) -> None:
"""Log an update error."""
logger.log(
level,
"Could not %s %s %s due to error: %s",
action,
name,
entity_type,
error.__class__.__name__,
) |
Return analytics preferences. | def websocket_analytics(
hass: HomeAssistant,
connection: websocket_api.connection.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Return analytics preferences."""
analytics: Analytics = hass.data[DOMAIN]
connection.send_result(
msg["id"],
{ATTR_PREFERENCES: analytics.preferences, ATTR_ONBOARDED: analytics.onboarded},
) |
Get custom integration value. | def get_custom_integration_value(
data: dict[str, CustomIntegration], domain: str
) -> int:
"""Get custom integration value."""
if domain in data:
return data[domain].total
return 0 |
Get core integration entity description. | def get_core_integration_entity_description(
domain: str, name: str
) -> AnalyticsSensorEntityDescription:
"""Get core integration entity description."""
return AnalyticsSensorEntityDescription(
key=f"core_{domain}_active_installations",
translation_key="core_integrations",
name=name,
state_class=SensorStateClass.TOTAL,
native_unit_of_measurement="active installations",
value_fn=lambda data: data.core_integrations.get(domain),
) |
Get custom integration entity description. | def get_custom_integration_entity_description(
domain: str,
) -> AnalyticsSensorEntityDescription:
"""Get custom integration entity description."""
return AnalyticsSensorEntityDescription(
key=f"custom_{domain}_active_installations",
translation_key="custom_integrations",
translation_placeholders={"custom_integration_domain": domain},
state_class=SensorStateClass.TOTAL,
native_unit_of_measurement="active installations",
value_fn=lambda data: data.custom_integrations.get(domain),
) |
Validate that the value is an existing file. | def _is_file(value: str) -> bool:
"""Validate that the value is an existing file."""
file_in = os.path.expanduser(value)
return os.path.isfile(file_in) and os.access(file_in, os.R_OK) |
Validate a string that contains state detection rules and return a list. | def _validate_state_det_rules(state_det_rules: Any) -> list[Any] | None:
"""Validate a string that contains state detection rules and return a list."""
json_rules = state_det_rules
if not isinstance(json_rules, list):
json_rules = [json_rules]
try:
state_detection_rules_validator(json_rules, ValueError)
except ValueError as exc:
_LOGGER.warning("Invalid state detection rules: %s", exc)
return None
return json_rules |
Wrap ADB methods and catch exceptions.
Allows for overriding the available status of the ADB connection via the
`override_available` parameter. | def adb_decorator(
override_available: bool = False,
) -> Callable[[_FuncType[_ADBDeviceT, _P, _R]], _ReturnFuncType[_ADBDeviceT, _P, _R]]:
"""Wrap ADB methods and catch exceptions.
Allows for overriding the available status of the ADB connection via the
`override_available` parameter.
"""
def _adb_decorator(
func: _FuncType[_ADBDeviceT, _P, _R],
) -> _ReturnFuncType[_ADBDeviceT, _P, _R]:
"""Wrap the provided ADB method and catch exceptions."""
@functools.wraps(func)
async def _adb_exception_catcher(
self: _ADBDeviceT, *args: _P.args, **kwargs: _P.kwargs
) -> _R | None:
"""Call an ADB-related method and catch exceptions."""
if not self.available and not override_available:
return None
try:
return await func(self, *args, **kwargs)
except LockNotAcquiredException:
# If the ADB lock could not be acquired, skip this command
_LOGGER.info(
(
"ADB command %s not executed because the connection is"
" currently in use"
),
func.__name__,
)
return None
except self.exceptions as err:
_LOGGER.error(
(
"Failed to execute an ADB command. ADB connection re-"
"establishing attempt in the next update. Error: %s"
),
err,
)
await self.aftv.adb_close()
# pylint: disable-next=protected-access
self._attr_available = False
return None
except Exception:
# An unforeseen exception occurred. Close the ADB connection so that
# it doesn't happen over and over again, then raise the exception.
await self.aftv.adb_close()
# pylint: disable-next=protected-access
self._attr_available = False
raise
return _adb_exception_catcher
return _adb_decorator |
Return formatted mac from device properties. | def get_androidtv_mac(dev_props: dict[str, Any]) -> str | None:
"""Return formatted mac from device properties."""
for prop_mac in (PROP_ETHMAC, PROP_WIFIMAC):
if if_mac := dev_props.get(prop_mac):
mac = format_mac(if_mac)
if mac not in _INVALID_MACS:
return mac
return None |
Generate an ADB key (if needed) and load it. | def _setup_androidtv(
hass: HomeAssistant, config: Mapping[str, Any]
) -> tuple[str, PythonRSASigner | None, str]:
"""Generate an ADB key (if needed) and load it."""
adbkey: str = config.get(
CONF_ADBKEY, hass.config.path(STORAGE_DIR, "androidtv_adbkey")
)
if CONF_ADB_SERVER_IP not in config:
# Use "adb_shell" (Python ADB implementation)
if not os.path.isfile(adbkey):
# Generate ADB key files
keygen(adbkey)
# Load the ADB key
signer = ADBPythonSync.load_adbkey(adbkey)
adb_log = f"using Python ADB implementation with adbkey='{adbkey}'"
else:
# Use "pure-python-adb" (communicate with ADB server)
signer = None
adb_log = (
"using ADB server at"
f" {config[CONF_ADB_SERVER_IP]}:{config[CONF_ADB_SERVER_PORT]}"
)
return adbkey, signer, adb_log |
Create an AndroidTVRemote instance. | def create_api(hass: HomeAssistant, host: str, enable_ime: bool) -> AndroidTVRemote:
"""Create an AndroidTVRemote instance."""
return AndroidTVRemote(
client_name="Home Assistant",
certfile=hass.config.path(STORAGE_DIR, "androidtv_remote_cert.pem"),
keyfile=hass.config.path(STORAGE_DIR, "androidtv_remote_key.pem"),
host=host,
loop=hass.loop,
enable_ime=enable_ime,
) |
Get value of enable_ime option or its default value. | def get_enable_ime(entry: ConfigEntry) -> bool:
"""Get value of enable_ime option or its default value."""
return entry.options.get(CONF_ENABLE_IME, CONF_ENABLE_IME_DEFAULT_VALUE) |
Set up PwrCtrl devices/switches. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up PwrCtrl devices/switches."""
host = config.get(CONF_HOST)
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
port_recv = config[CONF_PORT_RECV]
port_send = config[CONF_PORT_SEND]
try:
master = DeviceMaster(
username=username,
password=password,
read_port=port_send,
write_port=port_recv,
)
master.query(ip_addr=host)
except OSError as ex:
_LOGGER.error("Unable to discover PwrCtrl device: %s", str(ex))
return
devices: list[SwitchEntity] = []
for device in master.devices.values():
parent_device = PwrCtrlDevice(device)
devices.extend(
PwrCtrlSwitch(switch, parent_device) for switch in device.switches.values()
)
add_entities(devices) |
Turn the device list into a serializable list that can be reconstructed. | def serialize_device_list(devices: list[AnovaPrecisionCooker]) -> list[tuple[str, str]]:
"""Turn the device list into a serializable list that can be reconstructed."""
return [(device.device_key, device.type) for device in devices] |
Split the unit off the value if it ends with one of the supported units.
In that case return the value and unit as a tuple; otherwise return the
original value and None as the unit. | def infer_unit(value: str) -> tuple[str, str | None]:
"""Split the unit off the value if it ends with one of the supported units.
In that case return the value and unit as a tuple; otherwise return the
original value and None as the unit.
"""
for unit, ha_unit in INFERRED_UNITS.items():
if value.endswith(unit):
return value.removesuffix(unit), ha_unit
return value, None |
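A minimal usage sketch, assuming INFERRED_UNITS maps a suffix such as " Volts" to a Home Assistant unit constant (the real mapping lives in this module's constants):
# infer_unit("230.0 Volts") == ("230.0", <mapped HA unit>)   # hypothetical suffix
# infer_unit("42") == ("42", None)                           # no recognized suffix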
Return a cached template. | def _cached_template(template_str: str, hass: HomeAssistant) -> template.Template:
"""Return a cached template."""
return template.Template(template_str, hass) |
Generate event data to JSONify. | def async_events_json(hass: HomeAssistant) -> list[dict[str, Any]]:
"""Generate event data to JSONify."""
return [
{"event": key, "listener_count": value}
for key, value in hass.bus.async_listeners().items()
] |
Create response payload for app list. | def build_app_list(app_list: dict[str, str]) -> BrowseMedia:
"""Create response payload for app list."""
media_list = [
{"app_id": app_id, "title": app_name, "type": MediaType.APP}
for app_name, app_id in app_list.items()
]
return BrowseMedia(
media_class=MediaClass.DIRECTORY,
media_content_id="apps",
media_content_type=MediaType.APPS,
title="Apps",
can_play=False,
can_expand=True,
children=[item_payload(item) for item in media_list],
children_media_class=MediaClass.APP,
) |
Create response payload for a single media item.
Used by async_browse_media. | def item_payload(item: dict[str, Any]) -> BrowseMedia:
"""Create response payload for a single media item.
Used by async_browse_media.
"""
return BrowseMedia(
title=item["title"],
media_class=MediaClass.APP,
media_content_type=MediaType.APP,
media_content_id=item["app_id"],
can_play=False,
can_expand=False,
) |
Get the Apprise notification service. | def get_service(
hass: HomeAssistant,
config: ConfigType,
discovery_info: DiscoveryInfoType | None = None,
) -> AppriseNotificationService | None:
"""Get the Apprise notification service."""
# Create our Apprise Instance (reference our asset)
a_obj = apprise.Apprise()
if config.get(CONF_FILE):
# Sourced from a Configuration File
a_config = apprise.AppriseConfig()
if not a_config.add(config[CONF_FILE]):
_LOGGER.error("Invalid Apprise config url provided")
return None
if not a_obj.add(a_config):
_LOGGER.error("Invalid Apprise config url provided")
return None
# Ordered list of URLs
if urls := config.get(CONF_URL):
for entry in urls:
if not a_obj.add(entry):
_LOGGER.error("One or more specified Apprise URL(s) are invalid")
return None
return AppriseNotificationService(a_obj) |
Make a server-side filter from a list of callsigns. | def make_filter(callsigns: list) -> str:
"""Make a server-side filter from a list of callsigns."""
return " ".join(f"b/{sign.upper()}" for sign in callsigns) |
Calculate the GPS accuracy based on APRS posambiguity. | def gps_accuracy(gps: tuple[float, float], posambiguity: int) -> int:
"""Calculate the GPS accuracy based on APRS posambiguity."""
pos_a_map = {0: 0, 1: 1 / 600, 2: 1 / 60, 3: 1 / 6, 4: 1}
if posambiguity in pos_a_map:
degrees = pos_a_map[posambiguity]
gps2 = (gps[0], gps[1] + degrees)
dist_m: float = geopy.distance.distance(gps, gps2).m
accuracy = round(dist_m)
else:
message = f"APRS position ambiguity must be 0-4, not '{posambiguity}'."
raise ValueError(message)
return accuracy |
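A rough worked example: posambiguity=2 offsets the longitude by 1/60 degree, which near the equator is about 1.8 km, so approximately:
# gps_accuracy((0.0, 0.0), 2) ~= 1855  # meters, geodesic distance computed by geopy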
Set up the APRS tracker. | def setup_scanner(
hass: HomeAssistant,
config: ConfigType,
see: SeeCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> bool:
"""Set up the APRS tracker."""
callsigns = config[CONF_CALLSIGNS]
server_filter = make_filter(callsigns)
callsign = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
host = config[CONF_HOST]
timeout = config[CONF_TIMEOUT]
aprs_listener = AprsListenerThread(callsign, password, host, server_filter, see)
def aprs_disconnect(event: Event) -> None:
"""Stop the APRS connection."""
aprs_listener.stop()
aprs_listener.start()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, aprs_disconnect)
if not aprs_listener.start_event.wait(timeout):
_LOGGER.error("Timeout waiting for APRS to connect")
return False
if not aprs_listener.start_success:
_LOGGER.error(aprs_listener.start_message)
return False
_LOGGER.debug(aprs_listener.start_message)
return True |
Set up AquaLogic platform. | def setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up AquaLogic platform."""
host = config[DOMAIN][CONF_HOST]
port = config[DOMAIN][CONF_PORT]
processor = AquaLogicProcessor(hass, host, port)
hass.data[DOMAIN] = processor
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, processor.start_listen)
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, processor.shutdown)
_LOGGER.debug("AquaLogicProcessor %s:%i initialized", host, port)
return True |
Set up the Sharp Aquos TV platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Sharp Aquos TV platform."""
name = config[CONF_NAME]
port = config[CONF_PORT]
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
power_on_enabled = config["power_on_enabled"]
host = config[CONF_HOST]
remote = sharp_aquos_rc.TV(host, port, username, password, 15, 1)
add_entities([SharpAquosTVDevice(name, remote, power_on_enabled)]) |
Handle query retries. | def _retry(
func: Callable[Concatenate[_SharpAquosTVDeviceT, _P], Any],
) -> Callable[Concatenate[_SharpAquosTVDeviceT, _P], None]:
"""Handle query retries."""
def wrapper(obj: _SharpAquosTVDeviceT, *args: _P.args, **kwargs: _P.kwargs) -> None:
"""Wrap all query functions."""
update_retries = 5
while update_retries > 0:
try:
func(obj, *args, **kwargs)
break
except (OSError, TypeError, ValueError):
update_retries -= 1
if update_retries == 0:
obj.set_state(MediaPlayerState.OFF)
return wrapper |
Convert a device key to an entity key. | def _device_key_to_bluetooth_entity_key(
device: BLEDevice,
key: str,
) -> PassiveBluetoothEntityKey:
"""Convert a device key to an entity key."""
return PassiveBluetoothEntityKey(key, device.address) |
Convert a sensor device info to hass device info. | def _sensor_device_info_to_hass(
adv: Aranet4Advertisement,
) -> DeviceInfo:
"""Convert a sensor device info to hass device info."""
hass_device_info = DeviceInfo({})
if adv.readings and adv.readings.name:
hass_device_info[ATTR_NAME] = adv.readings.name
hass_device_info[ATTR_MANUFACTURER] = ARANET_MANUFACTURER_NAME
if adv.manufacturer_data:
hass_device_info[ATTR_SW_VERSION] = str(adv.manufacturer_data.version)
return hass_device_info |
Convert a sensor update to a Bluetooth data update. | def sensor_update_to_bluetooth_data_update(
adv: Aranet4Advertisement,
) -> PassiveBluetoothDataUpdate:
"""Convert a sensor update to a Bluetooth data update."""
data: dict[PassiveBluetoothEntityKey, Any] = {}
names: dict[PassiveBluetoothEntityKey, str | None] = {}
descs: dict[PassiveBluetoothEntityKey, EntityDescription] = {}
for key, desc in SENSOR_DESCRIPTIONS.items():
tag = _device_key_to_bluetooth_entity_key(adv.device, key)
val = getattr(adv.readings, key)
if val == -1:
continue
val *= desc.scale
data[tag] = val
names[tag] = desc.name
descs[tag] = desc
return PassiveBluetoothDataUpdate(
devices={adv.device.address: _sensor_device_info_to_hass(adv)},
entity_descriptions=descs,
entity_data=data,
entity_names=names,
) |
Retrieve client associated with a config entry. | def get_entry_client(hass: HomeAssistant, entry: ConfigEntry) -> Client:
"""Retrieve client associated with a config entry."""
client: Client = hass.data[DOMAIN_DATA_ENTRIES][entry.entry_id]
return client |
Return decorator to convert a connection error into a home assistant error. | def convert_exception(
func: Callable[_P, Coroutine[Any, Any, _R]],
) -> Callable[_P, Coroutine[Any, Any, _R]]:
"""Return decorator to convert a connection error into a home assistant error."""
@functools.wraps(func)
async def _convert_exception(*args: _P.args, **kwargs: _P.kwargs) -> _R:
try:
return await func(*args, **kwargs)
except ConnectionFailed as exception:
raise HomeAssistantError(
f"Connection failed to device during {func}"
) from exception
return _convert_exception |
Set up the aREST binary sensor. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the aREST binary sensor."""
resource = config[CONF_RESOURCE]
pin = config[CONF_PIN]
device_class = config.get(CONF_DEVICE_CLASS)
try:
response = requests.get(resource, timeout=10).json()
except requests.exceptions.MissingSchema:
_LOGGER.error(
"Missing resource or schema in configuration. Add http:// to your URL"
)
return
except requests.exceptions.ConnectionError:
_LOGGER.error("No route to device at %s", resource)
return
arest = ArestData(resource, pin)
add_entities(
[
ArestBinarySensor(
arest,
resource,
config.get(CONF_NAME, response[CONF_NAME]),
device_class,
pin,
)
],
True,
) |
Set up the aREST sensor. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the aREST sensor."""
resource = config[CONF_RESOURCE]
var_conf = config[CONF_MONITORED_VARIABLES]
pins = config[CONF_PINS]
try:
response = requests.get(resource, timeout=10).json()
except requests.exceptions.MissingSchema:
_LOGGER.error(
"Missing resource or schema in configuration. Add http:// to your URL"
)
return
except requests.exceptions.ConnectionError:
_LOGGER.error("No route to device at %s", resource)
return
arest = ArestData(resource)
def make_renderer(value_template):
"""Create a renderer based on variable_template value."""
if value_template is None:
return lambda value: value
value_template.hass = hass
def _render(value):
try:
return value_template.async_render({"value": value}, parse_result=False)
except TemplateError:
_LOGGER.exception("Error parsing value")
return value
return _render
dev = []
if var_conf is not None:
for variable, var_data in var_conf.items():
if variable not in response["variables"]:
_LOGGER.error("Variable: %s does not exist", variable)
continue
renderer = make_renderer(var_data.get(CONF_VALUE_TEMPLATE))
dev.append(
ArestSensor(
arest,
resource,
config.get(CONF_NAME, response[CONF_NAME]),
var_data.get(CONF_NAME, variable),
variable=variable,
unit_of_measurement=var_data.get(CONF_UNIT_OF_MEASUREMENT),
renderer=renderer,
)
)
if pins is not None:
for pinnum, pin in pins.items():
renderer = make_renderer(pin.get(CONF_VALUE_TEMPLATE))
dev.append(
ArestSensor(
ArestData(resource, pinnum),
resource,
config.get(CONF_NAME, response[CONF_NAME]),
pin.get(CONF_NAME),
pin=pinnum,
unit_of_measurement=pin.get(CONF_UNIT_OF_MEASUREMENT),
renderer=renderer,
)
)
add_entities(dev, True) |
Set up the aREST switches. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the aREST switches."""
resource = config[CONF_RESOURCE]
try:
response = requests.get(resource, timeout=10)
except requests.exceptions.MissingSchema:
_LOGGER.error(
"Missing resource or schema in configuration. Add http:// to your URL"
)
return
except requests.exceptions.ConnectionError:
_LOGGER.error("No route to device at %s", resource)
return
dev: list[SwitchEntity] = []
pins = config[CONF_PINS]
for pinnum, pin in pins.items():
dev.append(
ArestSwitchPin(
resource,
config.get(CONF_NAME, response.json()[CONF_NAME]),
pin.get(CONF_NAME),
pinnum,
pin[CONF_INVERT],
)
)
functions = config[CONF_FUNCTIONS]
for funcname, func in functions.items():
dev.append(
ArestSwitchFunction(
resource,
config.get(CONF_NAME, response.json()[CONF_NAME]),
func.get(CONF_NAME),
funcname,
)
)
add_entities(dev) |
Validate the configuration and return an Aruba scanner. | def get_scanner(hass: HomeAssistant, config: ConfigType) -> ArubaDeviceScanner | None:
"""Validate the configuration and return an Aruba scanner."""
scanner = ArubaDeviceScanner(config[DOMAIN])
return scanner if scanner.success_init else None |
Given a topic, dynamically create the right sensor type.
Async friendly. | def discover_sensors(topic: str, payload: dict[str, Any]) -> list[ArwnSensor] | None:
"""Given a topic, dynamically create the right sensor type.
Async friendly.
"""
parts = topic.split("/")
unit = payload.get("units", "")
domain = parts[1]
if domain == "temperature":
name = parts[2]
if unit == "F":
unit = UnitOfTemperature.FAHRENHEIT
else:
unit = UnitOfTemperature.CELSIUS
return [
ArwnSensor(
topic, name, "temp", unit, device_class=SensorDeviceClass.TEMPERATURE
)
]
if domain == "moisture":
name = f"{parts[2]} Moisture"
return [ArwnSensor(topic, name, "moisture", unit, "mdi:water-percent")]
if domain == "rain":
if len(parts) >= 3 and parts[2] == "today":
return [
ArwnSensor(
topic,
"Rain Since Midnight",
"since_midnight",
UnitOfPrecipitationDepth.INCHES,
device_class=SensorDeviceClass.PRECIPITATION,
)
]
return [
ArwnSensor(
topic + "/total",
"Total Rainfall",
"total",
unit,
device_class=SensorDeviceClass.PRECIPITATION,
),
ArwnSensor(
topic + "/rate",
"Rainfall Rate",
"rate",
unit,
device_class=SensorDeviceClass.PRECIPITATION,
),
]
if domain == "barometer":
return [
ArwnSensor(topic, "Barometer", "pressure", unit, "mdi:thermometer-lines")
]
if domain == "wind":
return [
ArwnSensor(
topic + "/speed",
"Wind Speed",
"speed",
unit,
device_class=SensorDeviceClass.WIND_SPEED,
),
ArwnSensor(
topic + "/gust",
"Wind Gust",
"gust",
unit,
device_class=SensorDeviceClass.WIND_SPEED,
),
ArwnSensor(
topic + "/dir", "Wind Direction", "direction", DEGREE, "mdi:compass"
),
]
return None |
Describe logbook events. | def async_describe_events(
hass: HomeAssistant,
async_describe_event: Callable[[str, str, Callable[[Event], dict[str, str]]], None],
) -> None:
"""Describe logbook events."""
device_registry = dr.async_get(hass)
@callback
def async_describe_logbook_event(event: Event) -> dict[str, str]:
"""Describe logbook event."""
device: dr.DeviceEntry | None = None
device_name: str = "Unknown device"
device = device_registry.devices[event.data[ATTR_DEVICE_ID]]
if device:
device_name = device.name_by_user or device.name or "Unknown device"
message = f"{device_name} captured an audio sample"
return {
LOGBOOK_ENTRY_NAME: device_name,
LOGBOOK_ENTRY_MESSAGE: message,
}
async_describe_event(DOMAIN, EVENT_RECORDING, async_describe_logbook_event) |
Validate language settings. | def validate_language(data: dict[str, Any]) -> Any:
"""Validate language settings."""
for engine, language in ENGINE_LANGUAGE_PAIRS:
if data[engine] is not None and data[language] is None:
raise vol.Invalid(f"Need language {language} for {engine} {data[engine]}")
return data |
Get a pipeline by id or the preferred pipeline. | def async_get_pipeline(hass: HomeAssistant, pipeline_id: str | None = None) -> Pipeline:
"""Get a pipeline by id or the preferred pipeline."""
pipeline_data: PipelineData = hass.data[DOMAIN]
if pipeline_id is None:
# A pipeline was not specified, use the preferred one
pipeline_id = pipeline_data.pipeline_store.async_get_preferred_item()
pipeline = pipeline_data.pipeline_store.data.get(pipeline_id)
# If invalid pipeline ID was specified
if pipeline is None:
raise PipelineNotFound(
"pipeline_not_found", f"Pipeline {pipeline_id} not found"
)
return pipeline |
Get all pipelines. | def async_get_pipelines(hass: HomeAssistant) -> Iterable[Pipeline]:
"""Get all pipelines."""
pipeline_data: PipelineData = hass.data[DOMAIN]
return pipeline_data.pipeline_store.data.values() |
Multiply 16-bit PCM samples by a constant. | def _multiply_volume(chunk: bytes, volume_multiplier: float) -> bytes:
"""Multiply 16-bit PCM samples by a constant."""
def _clamp(val: float) -> float:
"""Clamp to signed 16-bit."""
return max(-32768, min(32767, val))
return array.array(
"h",
(int(_clamp(value * volume_multiplier)) for value in array.array("h", chunk)),
).tobytes() |
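A quick sketch: the chunk is read as native-endian signed 16-bit samples and the scaled values are clamped to the int16 range, e.g.:
import array
quiet = array.array("h", [1000, -2000, 30000]).tobytes()
loud = _multiply_volume(quiet, 2.0)
# array.array("h", loud) == array('h', [2000, -4000, 32767])  # last sample clamped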
Register a migration of an engine used in pipelines. | def async_migrate_engine(
hass: HomeAssistant,
engine_type: Literal["conversation", "stt", "tts", "wake_word"],
old_value: str,
new_value: str,
) -> None:
"""Register a migration of an engine used in pipelines."""
hass.data.setdefault(DATA_MIGRATIONS, {})[engine_type] = (old_value, new_value)
# Run migrations when config is already loaded
if DATA_CONFIG in hass.data:
hass.async_create_background_task(
async_run_migrations(hass), "assist_pipeline_migration", eager_start=True
) |
Get the chosen pipeline for a domain. | def get_chosen_pipeline(
hass: HomeAssistant, domain: str, unique_id_prefix: str
) -> str | None:
"""Get the chosen pipeline for a domain."""
ent_reg = er.async_get(hass)
pipeline_entity_id = ent_reg.async_get_entity_id(
Platform.SELECT, domain, f"{unique_id_prefix}-pipeline"
)
if pipeline_entity_id is None:
return None
state = hass.states.get(pipeline_entity_id)
if state is None or state.state == OPTION_PREFERRED:
return None
pipeline_store: PipelineStorageCollection = hass.data[DOMAIN].pipeline_store
return next(
(item.id for item in pipeline_store.async_items() if item.name == state.state),
None,
) |
Get the chosen vad sensitivity for a domain. | def get_vad_sensitivity(
hass: HomeAssistant, domain: str, unique_id_prefix: str
) -> VadSensitivity:
"""Get the chosen vad sensitivity for a domain."""
ent_reg = er.async_get(hass)
sensitivity_entity_id = ent_reg.async_get_entity_id(
Platform.SELECT, domain, f"{unique_id_prefix}-vad_sensitivity"
)
if sensitivity_entity_id is None:
return VadSensitivity.DEFAULT
state = hass.states.get(sensitivity_entity_id)
if state is None:
return VadSensitivity.DEFAULT
return VadSensitivity(state.state) |
Yield fixed-sized chunks from samples, keeping leftover bytes from previous call(s). | def chunk_samples(
samples: bytes,
bytes_per_chunk: int,
leftover_chunk_buffer: AudioBuffer,
) -> Iterable[bytes]:
"""Yield fixed-sized chunks from samples, keeping leftover bytes from previous call(s)."""
if (len(leftover_chunk_buffer) + len(samples)) < bytes_per_chunk:
# Extend leftover chunk, but not enough samples to complete it
leftover_chunk_buffer.append(samples)
return
next_chunk_idx = 0
if leftover_chunk_buffer:
# Add to leftover chunk from previous call(s).
bytes_to_copy = bytes_per_chunk - len(leftover_chunk_buffer)
leftover_chunk_buffer.append(samples[:bytes_to_copy])
next_chunk_idx = bytes_to_copy
# Process full chunk in buffer
yield leftover_chunk_buffer.bytes()
leftover_chunk_buffer.clear()
while next_chunk_idx < len(samples) - bytes_per_chunk + 1:
# Process full chunk
yield samples[next_chunk_idx : next_chunk_idx + bytes_per_chunk]
next_chunk_idx += bytes_per_chunk
# Capture leftover chunks
if rest_samples := samples[next_chunk_idx:]:
leftover_chunk_buffer.append(rest_samples) |
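A minimal usage sketch (assuming the AudioBuffer helper from this module is constructed with a maximum size): with 4-byte chunks and 10 bytes of input, two full chunks are yielded and the trailing 2 bytes stay buffered for the next call.
leftover = AudioBuffer(4)  # assumed constructor: AudioBuffer(maxlen)
chunks = list(chunk_samples(b"0123456789", 4, leftover))
# chunks == [b"0123", b"4567"]; leftover.bytes() == b"89" awaits the next call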
Register the websocket API. | def async_register_websocket_api(hass: HomeAssistant) -> None:
"""Register the websocket API."""
websocket_api.async_register_command(hass, websocket_run)
websocket_api.async_register_command(hass, websocket_list_languages)
websocket_api.async_register_command(hass, websocket_list_runs)
websocket_api.async_register_command(hass, websocket_list_devices)
websocket_api.async_register_command(hass, websocket_get_run)
websocket_api.async_register_command(hass, websocket_device_capture) |
List pipeline runs for which debug data is available. | def websocket_list_runs(
hass: HomeAssistant,
connection: websocket_api.connection.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""List pipeline runs for which debug data is available."""
pipeline_data: PipelineData = hass.data[DOMAIN]
pipeline_id = msg["pipeline_id"]
if pipeline_id not in pipeline_data.pipeline_debug:
connection.send_result(msg["id"], {"pipeline_runs": []})
return
pipeline_debug = pipeline_data.pipeline_debug[pipeline_id]
connection.send_result(
msg["id"],
{
"pipeline_runs": [
{
"pipeline_run_id": pipeline_run_id,
"timestamp": pipeline_run.timestamp,
}
for pipeline_run_id, pipeline_run in pipeline_debug.items()
]
},
) |
List assist devices. | def websocket_list_devices(
hass: HomeAssistant,
connection: websocket_api.connection.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""List assist devices."""
pipeline_data: PipelineData = hass.data[DOMAIN]
ent_reg = er.async_get(hass)
connection.send_result(
msg["id"],
[
{
"device_id": device_id,
"pipeline_entity": ent_reg.async_get_entity_id(
"select", info.domain, f"{info.unique_id_prefix}-pipeline"
),
}
for device_id, info in pipeline_data.pipeline_devices.items()
],
) |
Get debug data for a pipeline run. | def websocket_get_run(
hass: HomeAssistant,
connection: websocket_api.connection.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Get debug data for a pipeline run."""
pipeline_data: PipelineData = hass.data[DOMAIN]
pipeline_id = msg["pipeline_id"]
pipeline_run_id = msg["pipeline_run_id"]
if pipeline_id not in pipeline_data.pipeline_debug:
connection.send_error(
msg["id"],
websocket_api.const.ERR_NOT_FOUND,
f"pipeline_id {pipeline_id} not found",
)
return
pipeline_debug = pipeline_data.pipeline_debug[pipeline_id]
if pipeline_run_id not in pipeline_debug:
connection.send_error(
msg["id"],
websocket_api.const.ERR_NOT_FOUND,
f"pipeline_run_id {pipeline_run_id} not found",
)
return
connection.send_result(
msg["id"],
{"events": pipeline_debug[pipeline_run_id].events},
) |
Set up for the Asterisk Voicemail box. | def setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up for the Asterisk Voicemail box."""
conf: dict[str, Any] = config[DOMAIN]
host: str = conf[CONF_HOST]
port: int = conf[CONF_PORT]
password: str = conf[CONF_PASSWORD]
hass.data[DOMAIN] = AsteriskData(hass, host, port, password, config)
create_issue(
hass,
DOMAIN,
"deprecated_integration",
breaks_in_ha_version="2024.9.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=IssueSeverity.WARNING,
translation_key="deprecated_integration",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "Asterisk Voicemail",
"mailbox": "mailbox",
},
)
return True |
Run library methods and zip results or manage exceptions. | def handle_errors_and_zip(
exceptions: type[Exception] | tuple[type[Exception], ...], keys: list[str] | None
) -> Callable[[_FuncType], _ReturnFuncType]:
"""Run library methods and zip results or manage exceptions."""
def _handle_errors_and_zip(func: _FuncType) -> _ReturnFuncType:
"""Run library methods and zip results or manage exceptions."""
@functools.wraps(func)
async def _wrapper(self: _AsusWrtBridgeT) -> dict[str, Any]:
try:
data = await func(self)
except exceptions as exc:
raise UpdateFailed(exc) from exc
if keys is None:
if not isinstance(data, dict):
raise UpdateFailed("Received invalid data type")
return data
if isinstance(data, dict):
return dict(zip(keys, list(data.values()), strict=False))
if not isinstance(data, (list, tuple)):
raise UpdateFailed("Received invalid data type")
return dict(zip(keys, data, strict=False))
return _wrapper
return _handle_errors_and_zip |
Validate that the value is an existing file. | def _is_file(value: str) -> bool:
"""Validate that the value is an existing file."""
file_in = os.path.expanduser(value)
return os.path.isfile(file_in) and os.access(file_in, os.R_OK) |
Get the ip address from the host name. | def _get_ip(host: str) -> str | None:
"""Get the ip address from the host name."""
try:
return socket.gethostbyname(host)
except socket.gaierror:
return None |
Add new tracker entities from the router. | def add_entities(
router: AsusWrtRouter, async_add_entities: AddEntitiesCallback, tracked: set[str]
) -> None:
"""Add new tracker entities from the router."""
new_tracked = []
for mac, device in router.devices.items():
if mac in tracked:
continue
new_tracked.append(AsusWrtDevice(router, device))
tracked.add(mac)
async_add_entities(new_tracked) |
Set up the Atome sensor. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Atome sensor."""
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
try:
atome_client = AtomeClient(username, password)
atome_client.login()
except PyAtomeError as exp:
_LOGGER.error(exp)
return
data = AtomeData(atome_client)
sensors = []
sensors.append(AtomeSensor(data, LIVE_NAME, LIVE_TYPE))
sensors.append(AtomeSensor(data, DAILY_NAME, DAILY_TYPE))
sensors.append(AtomeSensor(data, WEEKLY_NAME, WEEKLY_TYPE))
sensors.append(AtomeSensor(data, MONTHLY_NAME, MONTHLY_TYPE))
sensors.append(AtomeSensor(data, YEARLY_NAME, YEARLY_TYPE))
add_entities(sensors, True) |
Cancel future scheduled updates. | def _async_cancel_future_scheduled_updates(cancels: list[CALLBACK_TYPE]) -> None:
"""Cancel future scheduled updates."""
for cancel in cancels:
cancel()
cancels.clear() |
Get the latest state of the sensor. | def _retrieve_online_state(
data: AugustData, detail: DoorbellDetail | LockDetail
) -> bool:
"""Get the latest state of the sensor."""
# The doorbell will go into standby mode when there is no motion
# for a short while. It will wake by itself when needed so we need
# to consider it available or we will not report motion or dings
if isinstance(detail, DoorbellDetail):
return detail.is_online or detail.is_standby
return detail.bridge_is_online |
Get the latest state of the sensor. | def _activity_time_based_state(latest: Activity) -> bool:
"""Get the latest state of the sensor."""
start = latest.activity_start_time
end = latest.activity_end_time + TIME_TO_DECLARE_DETECTION
return start <= _native_datetime() <= end |
Return time in the format august uses without timezone. | def _native_datetime() -> datetime:
"""Return time in the format august uses without timezone."""
return datetime.now() |
Strip device types from a string.
August stores the name as Master Bed Lock
or Master Bed Door. We can come up with a
reasonable suggestion by removing the supported
device types from the string. | def _remove_device_types(name: str, device_types: list[str]) -> str:
"""Strip device types from a string.
August stores the name as Master Bed Lock
or Master Bed Door. We can come up with a
reasonable suggestion by removing the supported
device types from the string.
"""
lower_name = name.lower()
for device_type in device_types:
lower_name = lower_name.removesuffix(f" {device_type}")
return name[: len(lower_name)] |
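For example, given the device types of a lock:
# _remove_device_types("Master Bed Lock", ["lock", "door"]) == "Master Bed"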
Get the latest state of the sensor. | def _retrieve_device_battery_state(detail: LockDetail) -> int:
"""Get the latest state of the sensor."""
return detail.battery_level |
Get the latest state of the sensor. | def _retrieve_linked_keypad_battery_state(detail: KeypadDetail) -> int | None:
"""Get the latest state of the sensor."""
return detail.battery_percentage |
Create an aiohttp session for the august integration. | def async_create_august_clientsession(hass: HomeAssistant) -> aiohttp.ClientSession:
"""Create an aiohttp session for the august integration."""
# Create an aiohttp session instead of using the default one since the
# default one is likely to trigger august's WAF if another integration
# is also using Cloudflare
#
# The family is set to AF_INET because IPv6 keeps coming up as an issue
# see https://github.com/home-assistant/core/issues/97146
#
# When https://github.com/aio-libs/aiohttp/issues/4451 is implemented
# we can allow IPv6 again
#
return aiohttp_client.async_create_clientsession(hass, family=socket.AF_INET) |
Update keys for the yalexs-ble integration if available. | def _async_trigger_ble_lock_discovery(
hass: HomeAssistant, locks_with_offline_keys: list[LockDetail]
) -> None:
"""Update keys for the yalexs-ble integration if available."""
for lock_detail in locks_with_offline_keys:
discovery_flow.async_create_flow(
hass,
YALEXS_BLE_DOMAIN,
context={"source": SOURCE_INTEGRATION_DISCOVERY},
data=YaleXSBLEDiscovery(
{
"name": lock_detail.device_name,
"address": lock_detail.mac_address,
"serial": lock_detail.serial_number,
"key": lock_detail.offline_key,
"slot": lock_detail.offline_slot,
}
),
) |
Store the attributes that the lock detail api may have an invalid cache for.
Since we are connected to pubnub we may have more current data
than the api so we want to restore the most current data after
updating battery state etc. | def _save_live_attrs(lock_detail: DoorbellDetail | LockDetail) -> dict[str, Any]:
"""Store the attributes that the lock detail api may have an invalid cache for.
Since we are connected to pubnub we may have more current data
than the api so we want to restore the most current data after
updating battery state etc.
"""
return {attr: getattr(lock_detail, attr) for attr in API_CACHED_ATTRS} |
Restore the non-cache attributes after a cached update. | def _restore_live_attrs(
lock_detail: DoorbellDetail | LockDetail, attrs: dict[str, Any]
) -> None:
"""Restore the non-cache attributes after a cached update."""
for attr, value in attrs.items():
setattr(lock_detail, attr, value) |
Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user. | def validate_and_connect(
hass: HomeAssistant, data: Mapping[str, Any]
) -> dict[str, str]:
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
comport = data[CONF_PORT]
address = data[CONF_ADDRESS]
_LOGGER.debug("Initialising com port=%s", comport)
ret = {}
ret["title"] = DEFAULT_INTEGRATION_TITLE
try:
client = AuroraSerialClient(address, comport, parity="N", timeout=1)
client.connect()
ret[ATTR_SERIAL_NUMBER] = client.serial_number()
ret[ATTR_MODEL] = f"{client.version()} ({client.pn()})"
ret[ATTR_FIRMWARE] = client.firmware(1)
_LOGGER.info("Returning device info=%s", ret)
except AuroraError:
_LOGGER.warning("Could not connect to device=%s", comport)
raise
finally:
if client.serline.isOpen():
client.close()
# Return info we want to store in the config entry.
return ret |
Find and store available com ports for the GUI dropdown. | def scan_comports() -> tuple[list[str] | None, str | None]:
"""Find and store available com ports for the GUI dropdown."""
com_ports = serial.tools.list_ports.comports(include_links=True)
com_ports_list = []
for port in com_ports:
com_ports_list.append(port.device)
_LOGGER.debug("COM port option: %s", port.device)
if len(com_ports_list) > 0:
return com_ports_list, com_ports_list[0]
_LOGGER.warning("No com ports found. Need a valid RS485 device to communicate")
return None, None |
Verify that the client id is valid. | def verify_client_id(client_id: str) -> bool:
"""Verify that the client id is valid."""
try:
_parse_client_id(client_id)
except ValueError:
return False
return True |
Parse a url in parts and canonicalize according to IndieAuth. | def _parse_url(url: str) -> ParseResult:
"""Parse a url in parts and canonicalize according to IndieAuth."""
parts = urlparse(url)
# Canonicalize a url according to IndieAuth 3.2.
# SHOULD convert the hostname to lowercase
parts = parts._replace(netloc=parts.netloc.lower())
# If a URL with no path component is ever encountered,
# it MUST be treated as if it had the path /.
if parts.path == "":
parts = parts._replace(path="/")
return parts |
Test if client id is a valid URL according to IndieAuth section 3.2.
https://indieauth.spec.indieweb.org/#client-identifier | def _parse_client_id(client_id: str) -> ParseResult:
"""Test if client id is a valid URL according to IndieAuth section 3.2.
https://indieauth.spec.indieweb.org/#client-identifier
"""
parts = _parse_url(client_id)
# Client identifier URLs
# MUST have either an https or http scheme
if parts.scheme not in ("http", "https"):
raise ValueError
# MUST contain a path component
# Handled by url canonicalization.
# MUST NOT contain single-dot or double-dot path segments
if any(segment in (".", "..") for segment in parts.path.split("/")):
raise ValueError(
"Client ID cannot contain single-dot or double-dot path segments"
)
# MUST NOT contain a fragment component
if parts.fragment != "":
raise ValueError("Client ID cannot contain a fragment")
# MUST NOT contain a username or password component
if parts.username is not None:
raise ValueError("Client ID cannot contain username")
if parts.password is not None:
raise ValueError("Client ID cannot contain password")
# MAY contain a port
try:
# parts raises ValueError when port cannot be parsed as int
_ = parts.port
except ValueError as ex:
raise ValueError("Client ID contains invalid port") from ex
# Additionally, hostnames
# MUST be domain names or a loopback interface and
# MUST NOT be IPv4 or IPv6 addresses except for IPv4 127.0.0.1
# or IPv6 [::1]
# We are not going to follow the spec here. We are going to allow
# any internal network IP to be used inside a client id.
address = None
try:
netloc = parts.netloc
# Strip the [, ] from ipv6 addresses before parsing
if netloc[0] == "[" and netloc[-1] == "]":
netloc = netloc[1:-1]
address = ip_address(netloc)
except ValueError:
# Not an ip address
pass
if address is None or is_local(address):
return parts
raise ValueError("Hostname should be a domain name or local IP address") |
Component to allow users to login. | def async_setup(
hass: HomeAssistant, store_result: Callable[[str, Credentials], str]
) -> None:
"""Component to allow users to login."""
hass.http.register_view(WellKnownOAuthInfoView)
hass.http.register_view(AuthProvidersView)
hass.http.register_view(LoginFlowIndexView(hass.auth.login_flow, store_result))
hass.http.register_view(LoginFlowResourceView(hass.auth.login_flow, store_result)) |
Convert result to JSON. | def _prepare_result_json(
result: AuthFlowResult,
) -> AuthFlowResult:
"""Convert result to JSON."""
if result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY:
data = result.copy()
data.pop("result")
data.pop("data")
return data
if result["type"] != data_entry_flow.FlowResultType.FORM:
return result
data = result.copy()
if (schema := data["data_schema"]) is None:
data["data_schema"] = []
else:
data["data_schema"] = voluptuous_serialize.convert(schema)
return data |
Init mfa setup flow manager. | def async_setup(hass: HomeAssistant) -> None:
"""Init mfa setup flow manager."""
hass.data[DATA_SETUP_FLOW_MGR] = MfaFlowManager(hass)
websocket_api.async_register_command(
hass, WS_TYPE_SETUP_MFA, websocket_setup_mfa, SCHEMA_WS_SETUP_MFA
)
websocket_api.async_register_command(
hass, WS_TYPE_DEPOSE_MFA, websocket_depose_mfa, SCHEMA_WS_DEPOSE_MFA
) |
Return a setup flow for mfa auth module. | def websocket_setup_mfa(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
"""Return a setup flow for mfa auth module."""
async def async_setup_flow(msg: dict[str, Any]) -> None:
"""Return a setup flow for mfa auth module."""
flow_manager: MfaFlowManager = hass.data[DATA_SETUP_FLOW_MGR]
if (flow_id := msg.get("flow_id")) is not None:
result = await flow_manager.async_configure(flow_id, msg.get("user_input"))
connection.send_message(
websocket_api.result_message(msg["id"], _prepare_result_json(result))
)
return
mfa_module_id = msg["mfa_module_id"]
if hass.auth.get_auth_mfa_module(mfa_module_id) is None:
connection.send_message(
websocket_api.error_message(
msg["id"], "no_module", f"MFA module {mfa_module_id} is not found"
)
)
return
result = await flow_manager.async_init(
mfa_module_id, data={"user_id": connection.user.id}
)
connection.send_message(
websocket_api.result_message(msg["id"], _prepare_result_json(result))
)
hass.async_create_task(async_setup_flow(msg)) |
Remove user from mfa module. | def websocket_depose_mfa(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
"""Remove user from mfa module."""
async def async_depose(msg: dict[str, Any]) -> None:
"""Remove user from mfa auth module."""
mfa_module_id = msg["mfa_module_id"]
try:
await hass.auth.async_disable_user_mfa(
connection.user, msg["mfa_module_id"]
)
except ValueError as err:
connection.send_message(
websocket_api.error_message(
msg["id"],
"disable_failed",
f"Cannot disable MFA Module {mfa_module_id}: {err}",
)
)
return
connection.send_message(websocket_api.result_message(msg["id"], "done"))
hass.async_create_task(async_depose(msg)) |
Convert result to JSON. | def _prepare_result_json(
result: data_entry_flow.FlowResult,
) -> data_entry_flow.FlowResult:
"""Convert result to JSON."""
if result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY:
return result.copy()
if result["type"] != data_entry_flow.FlowResultType.FORM:
return result
data = result.copy()
if (schema := data["data_schema"]) is None:
data["data_schema"] = []
else:
data["data_schema"] = voluptuous_serialize.convert(schema)
return data |
Create an authorization code to fetch tokens. | def create_auth_code(
hass: HomeAssistant, client_id: str, credential: Credentials
) -> str:
"""Create an authorization code to fetch tokens."""
return cast(StoreResultType, hass.data[DOMAIN])(client_id, credential) |
Create an in memory store. | def _create_auth_code_store() -> tuple[StoreResultType, RetrieveResultType]:
"""Create an in memory store."""
temp_results: dict[tuple[str, str], tuple[datetime, Credentials]] = {}
@callback
def store_result(client_id: str, result: Credentials) -> str:
"""Store flow result and return a code to retrieve it."""
if not isinstance(result, Credentials):
raise TypeError("result has to be a Credentials instance")
code = uuid.uuid4().hex
temp_results[(client_id, code)] = (
dt_util.utcnow(),
result,
)
return code
@callback
def retrieve_result(client_id: str, code: str) -> Credentials | None:
"""Retrieve flow result."""
key = (client_id, code)
if key not in temp_results:
return None
created, result = temp_results.pop(key)
# OAuth 4.2.1
# The authorization code MUST expire shortly after it is issued to
# mitigate the risk of leaks. A maximum authorization code lifetime of
# 10 minutes is RECOMMENDED.
if dt_util.utcnow() - created < timedelta(minutes=10):
return result
return None
return store_result, retrieve_result |
Return metadata of users refresh tokens. | def websocket_refresh_tokens(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
"""Return metadata of users refresh tokens."""
current_id = connection.refresh_token_id
tokens: list[dict[str, Any]] = []
for refresh in connection.user.refresh_tokens.values():
if refresh.credential:
auth_provider_type = refresh.credential.auth_provider_type
else:
auth_provider_type = None
tokens.append(
{
"id": refresh.id,
"client_id": refresh.client_id,
"client_name": refresh.client_name,
"client_icon": refresh.client_icon,
"type": refresh.token_type,
"created_at": refresh.created_at,
"is_current": refresh.id == current_id,
"last_used_at": refresh.last_used_at,
"last_used_ip": refresh.last_used_ip,
"auth_provider_type": auth_provider_type,
}
)
connection.send_result(msg["id"], tokens) |
Handle a delete refresh token request. | def websocket_delete_refresh_token(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
"""Handle a delete refresh token request."""
refresh_token = connection.user.refresh_tokens.get(msg["refresh_token_id"])
if refresh_token is None:
connection.send_error(msg["id"], "invalid_token_id", "Received invalid token")
return
hass.auth.async_remove_refresh_token(refresh_token)
connection.send_result(msg["id"], {}) |
Handle delete all refresh tokens request. | def websocket_delete_all_refresh_tokens(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
"""Handle delete all refresh tokens request."""
current_refresh_token: RefreshToken
remove_failed = False
token_type = msg.get("token_type")
delete_current_token = msg.get("delete_current_token")
limit_token_types = token_type is not None
for token in list(connection.user.refresh_tokens.values()):
if token.id == connection.refresh_token_id:
# Skip the current refresh token as it has revoke_callback,
# which cancels/closes the connection.
# It will be removed after sending the result.
current_refresh_token = token
continue
if limit_token_types and token_type != token.token_type:
continue
try:
hass.auth.async_remove_refresh_token(token)
except Exception as err: # pylint: disable=broad-except
getLogger(__name__).exception(
"During refresh token removal, the following error occurred: %s",
err,
)
remove_failed = True
if remove_failed:
connection.send_error(
msg["id"], "token_removing_error", "During removal, an error was raised."
)
else:
connection.send_result(msg["id"], {})
if delete_current_token and (
not limit_token_types or current_refresh_token.token_type == token_type
):
# This will close the connection so we need to send the result first.
hass.loop.call_soon(hass.auth.async_remove_refresh_token, current_refresh_token) |
Handle a sign path request. | def websocket_sign_path(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
"""Handle a sign path request."""
connection.send_message(
websocket_api.result_message(
msg["id"],
{
"path": async_sign_path(
hass,
msg["path"],
timedelta(seconds=msg["expires"]),
)
},
)
) |
Return True if any automation references the blueprint. | def _blueprint_in_use(hass: HomeAssistant, blueprint_path: str) -> bool:
"""Return True if any automation references the blueprint."""
from . import automations_with_blueprint # pylint: disable=import-outside-toplevel
return len(automations_with_blueprint(hass, blueprint_path)) > 0 |
Get automation blueprints. | def async_get_blueprints(hass: HomeAssistant) -> blueprint.DomainBlueprints:
"""Get automation blueprints."""
return blueprint.DomainBlueprints(
hass, DOMAIN, LOGGER, _blueprint_in_use, _reload_blueprint_automations
) |
Describe logbook events. | def async_describe_events(
hass: HomeAssistant,
async_describe_event: Callable[
[str, str, Callable[[LazyEventPartialState], dict[str, Any]]], None
],
) -> None:
"""Describe logbook events."""
@callback
def async_describe_logbook_event(event: LazyEventPartialState) -> dict[str, Any]:
"""Describe a logbook event."""
data = event.data
message = "triggered"
if ATTR_SOURCE in data:
message = f"{message} by {data[ATTR_SOURCE]}"
return {
LOGBOOK_ENTRY_NAME: data.get(ATTR_NAME),
LOGBOOK_ENTRY_MESSAGE: message,
LOGBOOK_ENTRY_SOURCE: data.get(ATTR_SOURCE),
LOGBOOK_ENTRY_ENTITY_ID: data.get(ATTR_ENTITY_ID),
LOGBOOK_ENTRY_CONTEXT_ID: event.context_id,
}
async_describe_event(
DOMAIN, EVENT_AUTOMATION_TRIGGERED, async_describe_logbook_event
) |
Trace action execution of automation with automation_id. | def trace_automation(
hass: HomeAssistant,
automation_id: str | None,
config: ConfigType | None,
blueprint_inputs: ConfigType | None,
context: Context,
trace_config: ConfigType,
) -> Generator[AutomationTrace, None, None]:
"""Trace action execution of automation with automation_id."""
trace = AutomationTrace(automation_id, config, blueprint_inputs, context)
async_store_trace(hass, trace, trace_config[CONF_STORED_TRACES])
try:
yield trace
except Exception as ex:
if automation_id:
trace.set_error(ex)
raise
finally:
if automation_id:
trace.finished() |
Return true if specified automation entity_id is on.
Async friendly. | def is_on(hass: HomeAssistant, entity_id: str) -> bool:
"""Return true if specified automation entity_id is on.
Async friendly.
"""
return hass.states.is_state(entity_id, STATE_ON) |
Return all automations that reference the x. | def _automations_with_x(
hass: HomeAssistant, referenced_id: str, property_name: str
) -> list[str]:
"""Return all automations that reference the x."""
if DOMAIN not in hass.data:
return []
component: EntityComponent[BaseAutomationEntity] = hass.data[DOMAIN]
return [
automation_entity.entity_id
for automation_entity in component.entities
if referenced_id in getattr(automation_entity, property_name)
] |
Return all x in an automation. | def _x_in_automation(
hass: HomeAssistant, entity_id: str, property_name: str
) -> list[str]:
"""Return all x in an automation."""
if DOMAIN not in hass.data:
return []
component: EntityComponent[BaseAutomationEntity] = hass.data[DOMAIN]
if (automation_entity := component.get_entity(entity_id)) is None:
return []
return list(getattr(automation_entity, property_name)) |
Return all automations that reference the entity. | def automations_with_entity(hass: HomeAssistant, entity_id: str) -> list[str]:
"""Return all automations that reference the entity."""
return _automations_with_x(hass, entity_id, "referenced_entities") |
Return all entities in an automation. | def entities_in_automation(hass: HomeAssistant, entity_id: str) -> list[str]:
"""Return all entities in an automation."""
return _x_in_automation(hass, entity_id, "referenced_entities") |
Return all automations that reference the device. | def automations_with_device(hass: HomeAssistant, device_id: str) -> list[str]:
"""Return all automations that reference the device."""
return _automations_with_x(hass, device_id, "referenced_devices") |
Return all devices in an automation. | def devices_in_automation(hass: HomeAssistant, entity_id: str) -> list[str]:
"""Return all devices in an automation."""
return _x_in_automation(hass, entity_id, "referenced_devices") |