response | instruction
---|---|
Get the current value in kW. | def _get_meter_power(meter: MeterResponse) -> float:
"""Get the current value in kW."""
return meter.get_power(precision=3) |
Get the current value in Hz. | def _get_meter_frequency(meter: MeterResponse) -> float:
"""Get the current value in Hz."""
return round(meter.frequency, 1) |
Get the current value in A. | def _get_meter_total_current(meter: MeterResponse) -> float:
"""Get the current value in A."""
return meter.get_instant_total_current() |
Get the current value in V. | def _get_meter_average_voltage(meter: MeterResponse) -> float:
"""Get the current value in V."""
return round(meter.instant_average_voltage, 1) |
Return True if the last update was successful. | def async_last_update_was_successful(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Return True if the last update was successful."""
return bool(
(domain_data := hass.data.get(DOMAIN))
and (entry_data := domain_data.get(entry.entry_id))
and (coordinator := entry_data.get(POWERWALL_COORDINATOR))
and coordinator.last_update_success
) |
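The chained assignment-expression lookups above give a None-safe traversal of nested `hass.data`. A minimal self-contained sketch of the same idiom — the "powerwall" and "coordinator" keys here are illustrative placeholders, not the integration's real constants:

from types import SimpleNamespace
from typing import Any

def last_update_ok(data: dict[str, Any], entry_id: str) -> bool:
    # Each walrus assignment runs only if the previous lookup was truthy,
    # so any missing level short-circuits to bool(None) -> False.
    return bool(
        (domain_data := data.get("powerwall"))
        and (entry_data := domain_data.get(entry_id))
        and (coordinator := entry_data.get("coordinator"))
        and coordinator.last_update_success
    )

data = {"powerwall": {"abc123": {"coordinator": SimpleNamespace(last_update_success=True)}}}
print(last_update_ok(data, "abc123"))   # True
print(last_update_ok(data, "missing"))  # False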
Find a BluetoothServiceInfoBleak for the irk.
This iterates over all currently visible mac addresses and checks them against `irk`.
It returns the newest. | def async_last_service_info(
hass: HomeAssistant, irk: bytes
) -> bluetooth.BluetoothServiceInfoBleak | None:
"""Find a BluetoothServiceInfoBleak for the irk.
This iterates over all currently visible mac addresses and checks them against `irk`.
It returns the newest.
"""
    # This can't use existing data collected by the coordinator - it's called
    # when the coordinator doesn't know about the IRK, so this lookup isn't optimised.
cur: bluetooth.BluetoothServiceInfoBleak | None = None
cipher = get_cipher_for_irk(irk)
for service_info in bluetooth.async_discovered_service_info(hass, False):
if resolve_private_address(cipher, service_info.address):
if not cur or cur.time < service_info.time:
cur = service_info
return cur |
Create or return an existing PrivateDevicesCoordinator.
There should only be one per HomeAssistant instance. Associating private
mac addresses with an IRK involves AES operations. We don't want to
duplicate that work. | def async_get_coordinator(hass: HomeAssistant) -> PrivateDevicesCoordinator:
"""Create or return an existing PrivateDeviceManager.
There should only be one per HomeAssistant instance. Associating private
mac addresses with an IRK involves AES operations. We don't want to
duplicate that work.
"""
if existing := hass.data.get(DOMAIN):
return cast(PrivateDevicesCoordinator, existing)
pdm = hass.data[DOMAIN] = PrivateDevicesCoordinator(hass)
return pdm |
Get the absolute file path of a function. | def _get_function_absfile(func: Any) -> str | None:
"""Get the absolute file path of a function."""
import inspect # pylint: disable=import-outside-toplevel
abs_file: str | None = None
with suppress(Exception):
abs_file = inspect.getabsfile(func)
return abs_file |
Get the repr of an object but keep going if there is an exception.
We wrap repr to ensure if one object cannot be serialized, we can
still get the rest. | def _safe_repr(obj: Any) -> str:
"""Get the repr of an object but keep going if there is an exception.
We wrap repr to ensure if one object cannot be serialized, we can
still get the rest.
"""
try:
return repr(obj)
except Exception: # pylint: disable=broad-except
return f"Failed to serialize {type(obj)}" |
Initialize the input pin. | def setup_input(api: ProgettiHWSWAPI, input_number: int) -> Input:
"""Initialize the input pin."""
return api.get_input(input_number) |
Initialize the output pin. | def setup_switch(api: ProgettiHWSWAPI, switch_number: int, mode: str) -> Relay:
"""Initialize the output pin."""
return api.get_relay(switch_number, mode) |
Set up the Proliphix thermostats. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Proliphix thermostats."""
username = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
host = config.get(CONF_HOST)
pdp = proliphix.PDP(host, username, password)
pdp.update()
add_entities([ProliphixThermostat(pdp)], True) |
Activate Prometheus component. | def setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Activate Prometheus component."""
hass.http.register_view(PrometheusView(config[DOMAIN][CONF_REQUIRES_AUTH]))
conf: dict[str, Any] = config[DOMAIN]
entity_filter: entityfilter.EntityFilter = conf[CONF_FILTER]
namespace: str = conf[CONF_PROM_NAMESPACE]
climate_units = hass.config.units.temperature_unit
override_metric: str | None = conf.get(CONF_OVERRIDE_METRIC)
default_metric: str | None = conf.get(CONF_DEFAULT_METRIC)
component_config = EntityValues(
conf[CONF_COMPONENT_CONFIG],
conf[CONF_COMPONENT_CONFIG_DOMAIN],
conf[CONF_COMPONENT_CONFIG_GLOB],
)
metrics = PrometheusMetrics(
entity_filter,
namespace,
climate_units,
component_config,
override_metric,
default_metric,
)
hass.bus.listen(EVENT_STATE_CHANGED, metrics.handle_state_changed_event)
hass.bus.listen(
EVENT_ENTITY_REGISTRY_UPDATED,
metrics.handle_entity_registry_updated,
)
for state in hass.states.all():
if entity_filter(state.entity_id):
metrics.handle_state(state)
return True |
Get list of related automations and scripts. | def entity_used_in(hass: HomeAssistant, entity_id: str) -> list[str]:
"""Get list of related automations and scripts."""
used_in = automations_with_entity(hass, entity_id)
used_in += scripts_with_entity(hass, entity_id)
return used_in |
Create a binary sensor based on the given data. | def create_binary_sensor(
coordinator,
host_name: str,
node_name: str,
vm_id: int,
name: str,
) -> ProxmoxBinarySensor:
"""Create a binary sensor based on the given data."""
return ProxmoxBinarySensor(
coordinator=coordinator,
unique_id=f"proxmox_{node_name}_{vm_id}_running",
name=f"{node_name}_{name}",
icon="",
host_name=host_name,
node_name=node_name,
vm_id=vm_id,
) |
Create and return a DataUpdateCoordinator for a vm/container. | def create_coordinator_container_vm(
hass: HomeAssistant,
proxmox: ProxmoxAPI,
host_name: str,
node_name: str,
vm_id: int,
vm_type: int,
) -> DataUpdateCoordinator[dict[str, Any] | None]:
"""Create and return a DataUpdateCoordinator for a vm/container."""
async def async_update_data() -> dict[str, Any] | None:
"""Call the api and handle the response."""
def poll_api() -> dict[str, Any] | None:
"""Call the api."""
return call_api_container_vm(proxmox, node_name, vm_id, vm_type)
vm_status = await hass.async_add_executor_job(poll_api)
if vm_status is None:
_LOGGER.warning(
"Vm/Container %s unable to be found in node %s", vm_id, node_name
)
return None
return parse_api_container_vm(vm_status)
return DataUpdateCoordinator(
hass,
_LOGGER,
name=f"proxmox_coordinator_{host_name}_{node_name}_{vm_id}",
update_method=async_update_data,
update_interval=timedelta(seconds=UPDATE_INTERVAL),
) |
Get the container or vm api data and return it formatted in a dictionary.
It is implemented in this way to allow for more data to be added for sensors
in the future. | def parse_api_container_vm(status: dict[str, Any]) -> dict[str, Any]:
"""Get the container or vm api data and return it formatted in a dictionary.
It is implemented in this way to allow for more data to be added for sensors
in the future.
"""
return {"status": status["status"], "name": status["name"]} |
Make proper api calls. | def call_api_container_vm(
proxmox: ProxmoxAPI,
node_name: str,
vm_id: int,
machine_type: int,
) -> dict[str, Any] | None:
"""Make proper api calls."""
status = None
try:
if machine_type == TYPE_VM:
status = proxmox.nodes(node_name).qemu(vm_id).status.current.get()
elif machine_type == TYPE_CONTAINER:
status = proxmox.nodes(node_name).lxc(vm_id).status.current.get()
except (ResourceException, requests.exceptions.ConnectionError):
return None
return status |
Perform some pre-checks on the given image. | def _precheck_image(image, opts):
"""Perform some pre-checks on the given image."""
if not opts:
raise ValueError
try:
img = Image.open(io.BytesIO(image))
except OSError as err:
_LOGGER.warning("Failed to open image")
raise ValueError from err
imgfmt = str(img.format)
if imgfmt not in ("PNG", "JPEG"):
_LOGGER.warning("Image is of unsupported type: %s", imgfmt)
raise ValueError
if img.mode != "RGB":
img = img.convert("RGB")
return img |
Resize image. | def _resize_image(image, opts):
"""Resize image."""
try:
img = _precheck_image(image, opts)
except ValueError:
return image
quality = opts.quality or DEFAULT_QUALITY
new_width = opts.max_width
(old_width, old_height) = img.size
old_size = len(image)
if old_width <= new_width:
if opts.quality is None:
_LOGGER.debug("Image is smaller-than/equal-to requested width")
return image
new_width = old_width
scale = new_width / float(old_width)
new_height = int(float(old_height) * float(scale))
img = img.resize((new_width, new_height), Image.Resampling.LANCZOS)
imgbuf = io.BytesIO()
img.save(imgbuf, "JPEG", optimize=True, quality=quality)
newimage = imgbuf.getvalue()
if not opts.force_resize and len(newimage) >= old_size:
_LOGGER.debug(
(
"Using original image (%d bytes) "
"because resized image (%d bytes) is not smaller"
),
old_size,
len(newimage),
)
return image
_LOGGER.debug(
"Resized image from (%dx%d - %d bytes) to (%dx%d - %d bytes)",
old_width,
old_height,
old_size,
new_width,
new_height,
len(newimage),
)
return newimage |
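The resize path preserves aspect ratio: scale = new_width / old_width, then new_height = old_height * scale, and the result is re-encoded as JPEG. A standalone sketch of that flow, assuming Pillow 9+ and a synthetic image:

import io
from PIL import Image

img = Image.new("RGB", (800, 600), color=(40, 90, 160))  # synthetic 800x600 image
new_width = 400
scale = new_width / float(img.width)    # 0.5
new_height = int(img.height * scale)    # 300, aspect ratio kept
resized = img.resize((new_width, new_height), Image.Resampling.LANCZOS)
buf = io.BytesIO()
resized.save(buf, "JPEG", optimize=True, quality=75)
print(resized.size, len(buf.getvalue()), "bytes")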
Crop image. | def _crop_image(image, opts):
"""Crop image."""
try:
img = _precheck_image(image, opts)
except ValueError:
return image
quality = opts.quality or DEFAULT_QUALITY
(old_width, old_height) = img.size
old_size = len(image)
if opts.top is None:
opts.top = 0
if opts.left is None:
opts.left = 0
if opts.max_width is None or opts.max_width > old_width - opts.left:
opts.max_width = old_width - opts.left
if opts.max_height is None or opts.max_height > old_height - opts.top:
opts.max_height = old_height - opts.top
img = img.crop(
(opts.left, opts.top, opts.left + opts.max_width, opts.top + opts.max_height)
)
imgbuf = io.BytesIO()
img.save(imgbuf, "JPEG", optimize=True, quality=quality)
newimage = imgbuf.getvalue()
_LOGGER.debug(
"Cropped image from (%dx%d - %d bytes) to (%dx%d - %d bytes)",
old_width,
old_height,
old_size,
opts.max_width,
opts.max_height,
len(newimage),
)
return newimage |
Raise NotSupported exception if the printer is not supported. | def ensure_printer_is_supported(version: VersionInfo) -> None:
"""Raise NotSupported exception if the printer is not supported."""
try:
if AwesomeVersion("2.0.0") <= AwesomeVersion(version["api"]):
return
# Workaround to allow PrusaLink 0.7.2 on MK3 and MK2.5 that supports
# the 2.0.0 API, but doesn't advertise it yet
if version.get("original", "").startswith(
("PrusaLink I3MK3", "PrusaLink I3MK2")
) and AwesomeVersion("0.7.2") <= AwesomeVersion(version["server"]):
return
except AwesomeVersionException as err:
raise NotSupported from err
raise NotSupported |
Use the last 4 characters of the credential as a suffix; unique ID per PSN user. | def format_unique_id(creds, mac_address):
    """Use the last 4 characters of the credential as a suffix; unique ID per PSN user."""
suffix = creds[-4:]
return f"{mac_address}_{suffix}" |
Load games for sources. | def load_games(hass: HomeAssistant, unique_id: str) -> JsonObjectType:
"""Load games for sources."""
g_file = hass.config.path(GAMES_FILE.format(unique_id))
try:
games = load_json_object(g_file)
except HomeAssistantError as error:
games = {}
_LOGGER.error("Failed to load games file: %s", error)
# Only reformat when the games file actually exists on disk.
if os.path.isfile(g_file):
games = _reformat_data(hass, games, unique_id)
return games |
Save games to file. | def save_games(hass: HomeAssistant, games: dict, unique_id: str):
"""Save games to file."""
g_file = hass.config.path(GAMES_FILE.format(unique_id))
try:
save_json(g_file, games)
except OSError as error:
_LOGGER.error("Could not save game list, %s", error) |
Reformat data to correct format. | def _reformat_data(hass: HomeAssistant, games: dict, unique_id: str) -> dict:
"""Reformat data to correct format."""
data_reformatted = False
for game, data in games.items():
# Convert str format to dict format.
if not isinstance(data, dict):
# Use existing title. Assign defaults.
games[game] = {
ATTR_LOCKED: False,
ATTR_MEDIA_TITLE: data,
ATTR_MEDIA_IMAGE_URL: None,
ATTR_MEDIA_CONTENT_TYPE: MediaType.GAME,
}
data_reformatted = True
_LOGGER.debug("Reformatting media data for item: %s, %s", game, data)
if data_reformatted:
save_games(hass, games, unique_id)
return games |
Handle for services. | def service_handle(hass: HomeAssistant):
"""Handle for services."""
async def async_service_command(call: ServiceCall) -> None:
"""Service for sending commands."""
entity_ids = call.data[ATTR_ENTITY_ID]
command = call.data[ATTR_COMMAND]
for device in hass.data[PS4_DATA].devices:
if device.entity_id in entity_ids:
await device.async_send_command(command)
hass.services.async_register(
DOMAIN, SERVICE_COMMAND, async_service_command, schema=PS4_COMMAND_SCHEMA
) |
Read in all of our configuration, and initialize the loopback switch. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Read in all of our configuration, and initialize the loopback switch."""
name = config.get(CONF_NAME)
sink_name = config.get(CONF_SINK_NAME)
source_name = config.get(CONF_SOURCE_NAME)
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
hass.data.setdefault(DOMAIN, {})
server_id = f"{host}:{port}"
if host:
connect_to_server = server_id
else:
connect_to_server = None
if server_id in hass.data[DOMAIN]:
server = hass.data[DOMAIN][server_id]
else:
server = Pulse(server=connect_to_server, connect=False, threading_lock=True)
hass.data[DOMAIN][server_id] = server
add_entities([PALoopbackSwitch(name, server, sink_name, source_name)], True) |
Get an aiopurpleair API object. | def async_get_api(hass: HomeAssistant, api_key: str) -> API:
"""Get an aiopurpleair API object."""
session = aiohttp_client.async_get_clientsession(hass)
return API(api_key, session=session) |
Define a schema for searching for sensors near a coordinate pair. | def async_get_coordinates_schema(hass: HomeAssistant) -> vol.Schema:
"""Define a schema for searching for sensors near a coordinate pair."""
return vol.Schema(
{
vol.Inclusive(
CONF_LATITUDE, "coords", default=hass.config.latitude
): cv.latitude,
vol.Inclusive(
CONF_LONGITUDE, "coords", default=hass.config.longitude
): cv.longitude,
vol.Optional(CONF_DISTANCE, default=DEFAULT_DISTANCE): cv.positive_int,
}
) |
Return a set of nearby sensors as SelectOptionDict objects. | def async_get_nearby_sensors_options(
nearby_sensor_results: list[NearbySensorResult],
) -> list[SelectOptionDict]:
"""Return a set of nearby sensors as SelectOptionDict objects."""
return [
SelectOptionDict(
value=str(result.sensor.sensor_index), label=cast(str, result.sensor.name)
)
for result in nearby_sensor_results
] |
Define a schema for selecting a sensor from a list. | def async_get_nearby_sensors_schema(options: list[SelectOptionDict]) -> vol.Schema:
"""Define a schema for selecting a sensor from a list."""
return vol.Schema(
{
vol.Required(CONF_SENSOR_INDEX): SelectSelector(
SelectSelectorConfig(options=options, mode=SelectSelectorMode.DROPDOWN)
)
}
) |
Return a set of already-configured sensors as SelectOptionDict objects. | def async_get_remove_sensor_options(
hass: HomeAssistant, config_entry: ConfigEntry
) -> list[SelectOptionDict]:
"""Return a set of already-configured sensors as SelectOptionDict objects."""
device_registry = dr.async_get(hass)
return [
SelectOptionDict(value=device_entry.id, label=cast(str, device_entry.name))
for device_entry in device_registry.devices.get_devices_for_config_entry_id(
config_entry.entry_id
)
] |
Define a schema removing a sensor. | def async_get_remove_sensor_schema(sensors: list[SelectOptionDict]) -> vol.Schema:
"""Define a schema removing a sensor."""
return vol.Schema(
{
vol.Required(CONF_SENSOR_DEVICE_ID): SelectSelector(
SelectSelectorConfig(options=sensors, mode=SelectSelectorMode.DROPDOWN)
)
}
) |
Get the Pushsafer.com notification service. | def get_service(
hass: HomeAssistant,
config: ConfigType,
discovery_info: DiscoveryInfoType | None = None,
) -> PushsaferNotificationService:
"""Get the Pushsafer.com notification service."""
return PushsaferNotificationService(
config.get(CONF_DEVICE_KEY), hass.config.is_allowed_path
) |
Get enabled API indicators. | def get_enabled_sensor_keys(
using_private_api: bool, entries: list[RegistryEntry]
) -> set[str]:
"""Get enabled API indicators."""
if not using_private_api:
return {KEY_PVPC}
if len(entries) > 1:
# activate only enabled sensors
return {
_ha_uniqueid_to_sensor_key[sensor.unique_id]
for sensor in entries
if not sensor.disabled
}
# default sensors when enabling token access
return {KEY_PVPC, KEY_INJECTION} |
Generate unique_id for each sensor kind and config entry. | def make_sensor_unique_id(config_entry_id: str | None, sensor_key: str) -> str:
"""Generate unique_id for each sensor kind and config entry."""
assert sensor_key in ALL_SENSORS
assert config_entry_id is not None
if sensor_key == KEY_PVPC:
# for old compatibility
return config_entry_id
return f"{config_entry_id}_{sensor_key}" |
Set up the pyLoad sensors. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the pyLoad sensors."""
host = config[CONF_HOST]
port = config[CONF_PORT]
protocol = "https" if config[CONF_SSL] else "http"
name = config[CONF_NAME]
username = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
monitored_types = config[CONF_MONITORED_VARIABLES]
url = f"{protocol}://{host}:{port}/api/"
try:
pyloadapi = PyLoadAPI(api_url=url, username=username, password=password)
except (
requests.exceptions.ConnectionError,
requests.exceptions.HTTPError,
) as conn_err:
_LOGGER.error("Error setting up pyLoad API: %s", conn_err)
return
devices = []
for ng_type in monitored_types:
new_sensor = PyLoadSensor(
api=pyloadapi, sensor_type=SENSOR_TYPES[ng_type], client_name=name
)
devices.append(new_sensor)
add_entities(devices, True) |
Initialize the Python script component. | def setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Initialize the Python script component."""
path = hass.config.path(FOLDER)
if not os.path.isdir(path):
_LOGGER.warning("Folder %s not found in configuration folder", FOLDER)
return False
discover_scripts(hass)
def reload_scripts_handler(call: ServiceCall) -> None:
"""Handle reload service calls."""
discover_scripts(hass)
hass.services.register(DOMAIN, SERVICE_RELOAD, reload_scripts_handler)
return True |
Discover python scripts in folder. | def discover_scripts(hass):
"""Discover python scripts in folder."""
path = hass.config.path(FOLDER)
if not os.path.isdir(path):
_LOGGER.warning("Folder %s not found in configuration folder", FOLDER)
return False
def python_script_service_handler(call: ServiceCall) -> ServiceResponse:
"""Handle python script service calls."""
return execute_script(hass, call.service, call.data, call.return_response)
existing = hass.services.services.get(DOMAIN, {}).keys()
for existing_service in existing:
if existing_service == SERVICE_RELOAD:
continue
hass.services.remove(DOMAIN, existing_service)
# Load user-provided service descriptions from python_scripts/services.yaml
services_yaml = os.path.join(path, "services.yaml")
if os.path.exists(services_yaml):
services_dict = load_yaml_dict(services_yaml)
else:
services_dict = {}
for fil in glob.iglob(os.path.join(path, "*.py")):
name = os.path.splitext(os.path.basename(fil))[0]
hass.services.register(
DOMAIN,
name,
python_script_service_handler,
supports_response=SupportsResponse.OPTIONAL,
)
service_desc = {
CONF_NAME: services_dict.get(name, {}).get("name", name),
CONF_DESCRIPTION: services_dict.get(name, {}).get("description", ""),
CONF_FIELDS: services_dict.get(name, {}).get("fields", {}),
}
async_set_service_schema(hass, DOMAIN, name, service_desc) |
Implement augmented-assign (+=, -=, etc.) operators for restricted code.
See RestrictedPython's `visit_AugAssign` for details. | def guarded_inplacevar(op: str, target: Any, operand: Any) -> Any:
"""Implement augmented-assign (+=, -=, etc.) operators for restricted code.
See RestrictedPython's `visit_AugAssign` for details.
"""
if not isinstance(target, (list, Number, str)):
raise ScriptError(f"The {op!r} operation is not allowed on a {type(target)}")
op_fun = IOPERATOR_TO_OPERATOR.get(op)
if not op_fun:
raise ScriptError(f"The {op!r} operation is not allowed")
return op_fun(target, operand) |
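IOPERATOR_TO_OPERATOR is assumed here to map augmented-assignment tokens to in-place functions from the operator module. A minimal self-contained version of the guard under that assumption:

import operator
from numbers import Number
from typing import Any

# Assumed shape of the real mapping; only two entries shown.
IOPERATOR_TO_OPERATOR = {"+=": operator.iadd, "-=": operator.isub}

class ScriptError(Exception):
    pass

def guarded_inplacevar(op: str, target: Any, operand: Any) -> Any:
    if not isinstance(target, (list, Number, str)):
        raise ScriptError(f"The {op!r} operation is not allowed on a {type(target)}")
    if (op_fun := IOPERATOR_TO_OPERATOR.get(op)) is None:
        raise ScriptError(f"The {op!r} operation is not allowed")
    return op_fun(target, operand)

print(guarded_inplacevar("+=", 1, 2))      # 3
print(guarded_inplacevar("+=", [1], [2]))  # [1, 2]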
Execute a script. | def execute_script(hass, name, data=None, return_response=False):
"""Execute a script."""
filename = f"{name}.py"
raise_if_invalid_filename(filename)
with open(hass.config.path(FOLDER, filename), encoding="utf8") as fil:
source = fil.read()
return execute(hass, filename, source, data, return_response=return_response) |
Execute Python source. | def execute(hass, filename, source, data=None, return_response=False):
"""Execute Python source."""
compiled = compile_restricted_exec(source, filename=filename)
if compiled.errors:
_LOGGER.error(
"Error loading script %s: %s", filename, ", ".join(compiled.errors)
)
return
if compiled.warnings:
_LOGGER.warning(
"Warning loading script %s: %s", filename, ", ".join(compiled.warnings)
)
def protected_getattr(obj, name, default=None):
"""Restricted method to get attributes."""
if name.startswith("async_"):
raise ScriptError("Not allowed to access async methods")
if (
obj is hass
and name not in ALLOWED_HASS
or obj is hass.bus
and name not in ALLOWED_EVENTBUS
or obj is hass.states
and name not in ALLOWED_STATEMACHINE
or obj is hass.services
and name not in ALLOWED_SERVICEREGISTRY
or obj is dt_util
and name not in ALLOWED_DT_UTIL
or obj is datetime
and name not in ALLOWED_DATETIME
or isinstance(obj, TimeWrapper)
and name not in ALLOWED_TIME
):
raise ScriptError(f"Not allowed to access {obj.__class__.__name__}.{name}")
return getattr(obj, name, default)
extra_builtins = {
"datetime": datetime,
"sorted": sorted,
"time": TimeWrapper(),
"dt_util": dt_util,
"min": min,
"max": max,
"sum": sum,
"any": any,
"all": all,
"enumerate": enumerate,
}
builtins = safe_builtins.copy()
builtins.update(utility_builtins)
builtins.update(limited_builtins)
builtins.update(extra_builtins)
logger = logging.getLogger(f"{__name__}.{filename}")
restricted_globals = {
"__builtins__": builtins,
"_print_": StubPrinter,
"_getattr_": protected_getattr,
"_write_": full_write_guard,
"_getiter_": iter,
"_getitem_": default_guarded_getitem,
"_iter_unpack_sequence_": guarded_iter_unpack_sequence,
"_unpack_sequence_": guarded_unpack_sequence,
"_inplacevar_": guarded_inplacevar,
"hass": hass,
"data": data or {},
"logger": logger,
"output": {},
}
try:
_LOGGER.info("Executing %s: %s", filename, data)
# pylint: disable-next=exec-used
exec(compiled.code, restricted_globals) # noqa: S102
_LOGGER.debug(
"Output of python_script: `%s`:\n%s",
filename,
restricted_globals["output"],
)
# Ensure that we're always returning a dictionary
if not isinstance(restricted_globals["output"], dict):
output_type = type(restricted_globals["output"])
restricted_globals["output"] = {}
raise ScriptError(
f"Expected `output` to be a dictionary, was {output_type}"
)
except ScriptError as err:
if return_response:
raise ServiceValidationError(f"Error executing script: {err}") from err
logger.error("Error executing script: %s", err)
return None
except Exception as err: # pylint: disable=broad-except
if return_response:
raise HomeAssistantError(
f"Error executing script ({type(err).__name__}): {err}"
) from err
logger.exception("Error executing script")
return None
return restricted_globals["output"] |
Create a qBittorrent client. | def setup_client(url: str, username: str, password: str, verify_ssl: bool) -> Client:
"""Create a qBittorrent client."""
client = Client(url, verify=verify_ssl)
client.login(username, password)
# Get an arbitrary attribute to test if connection succeeds
client.get_alternative_speed_status()
return client |
Convert seconds to HH:MM:SS format. | def seconds_to_hhmmss(seconds) -> str:
"""Convert seconds to HH:MM:SS format."""
# 8640000 seconds (100 days) is qBittorrent's sentinel for "no ETA".
if seconds == 8640000:
return "None"
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
return f"{int(hours):02}:{int(minutes):02}:{int(seconds):02}" |
Format a UNIX timestamp to a human-readable date. | def format_unix_timestamp(timestamp) -> str:
"""Format a UNIX timestamp to a human-readable date."""
dt_object = datetime.fromtimestamp(timestamp, tz=UTC)
return dt_object.isoformat() |
Format the progress of a torrent. | def format_progress(torrent) -> str:
"""Format the progress of a torrent."""
progress = torrent["progress"]
progress = float(progress) * 100
return f"{progress:.2f}" |
Format a list of torrents. | def format_torrents(torrents: list[dict[str, Any]]) -> dict[str, dict[str, Any]]:
"""Format a list of torrents."""
value = {}
for torrent in torrents:
value[torrent["name"]] = format_torrent(torrent)
return value |
Format a single torrent. | def format_torrent(torrent) -> dict[str, Any]:
"""Format a single torrent."""
value = {}
value["id"] = torrent["hash"]
value["added_date"] = format_unix_timestamp(torrent["added_on"])
value["percent_done"] = format_progress(torrent)
value["status"] = torrent["state"]
value["eta"] = seconds_to_hhmmss(torrent["eta"])
value["ratio"] = "{:.2f}".format(float(torrent["ratio"]))
return value |
Get current download/upload state. | def get_state(coordinator: QBittorrentDataCoordinator) -> str:
"""Get current download/upload state."""
upload = coordinator.data["server_state"]["up_info_speed"]
download = coordinator.data["server_state"]["dl_info_speed"]
if upload > 0 and download > 0:
return STATE_UP_DOWN
if upload > 0 and download == 0:
return STATE_SEEDING
if upload == 0 and download > 0:
return STATE_DOWNLOADING
return STATE_IDLE |
Count the number of torrents in specified states. | def count_torrents_in_states(
coordinator: QBittorrentDataCoordinator, states: list[str]
) -> int:
"""Count the number of torrents in specified states."""
# When torrents are not in the returned data, there are none, return 0.
if "torrents" not in coordinator.data:
return 0
if not states:
return len(coordinator.data["torrents"])
return len(
[
torrent
for torrent in coordinator.data["torrents"].values()
if torrent["state"] in states
]
) |
Convert a sensor update to a bluetooth data update. | def sensor_update_to_bluetooth_data_update(
sensor_update: SensorUpdate,
) -> PassiveBluetoothDataUpdate:
"""Convert a sensor update to a bluetooth data update."""
return PassiveBluetoothDataUpdate(
devices={
device_id: sensor_device_info_to_hass_device_info(device_info)
for device_id, device_info in sensor_update.devices.items()
},
entity_descriptions={
device_key_to_bluetooth_entity_key(device_key): BINARY_SENSOR_DESCRIPTIONS[
description.device_class
]
for device_key, description in sensor_update.binary_entity_descriptions.items()
if description.device_class
},
entity_data={
device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
for device_key, sensor_values in sensor_update.binary_entity_values.items()
},
entity_names={
device_key_to_bluetooth_entity_key(device_key): sensor_values.name
for device_key, sensor_values in sensor_update.binary_entity_values.items()
},
) |
Convert a device key to an entity key. | def device_key_to_bluetooth_entity_key(
device_key: DeviceKey,
) -> PassiveBluetoothEntityKey:
"""Convert a device key to an entity key."""
return PassiveBluetoothEntityKey(device_key.key, device_key.device_id) |
Convert a sensor update to a bluetooth data update. | def sensor_update_to_bluetooth_data_update(
sensor_update: SensorUpdate,
) -> PassiveBluetoothDataUpdate:
"""Convert a sensor update to a bluetooth data update."""
return PassiveBluetoothDataUpdate(
devices={
device_id: sensor_device_info_to_hass_device_info(device_info)
for device_id, device_info in sensor_update.devices.items()
},
entity_descriptions={
device_key_to_bluetooth_entity_key(device_key): SENSOR_DESCRIPTIONS[
(description.device_class, description.native_unit_of_measurement)
]
for device_key, description in sensor_update.entity_descriptions.items()
if description.device_class and description.native_unit_of_measurement
},
entity_data={
device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
for device_key, sensor_values in sensor_update.entity_values.items()
},
entity_names={
device_key_to_bluetooth_entity_key(device_key): sensor_values.name
for device_key, sensor_values in sensor_update.entity_values.items()
},
) |
Set up the Queensland Bushfire Alert Feed platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Queensland Bushfire Alert Feed platform."""
scan_interval: timedelta = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)
coordinates: tuple[float, float] = (
config.get(CONF_LATITUDE, hass.config.latitude),
config.get(CONF_LONGITUDE, hass.config.longitude),
)
radius_in_km: float = config[CONF_RADIUS]
categories: list[str] = config[CONF_CATEGORIES]
# Initialize the entity manager.
feed = QldBushfireFeedEntityManager(
hass, add_entities, scan_interval, coordinates, radius_in_km, categories
)
def start_feed_manager(event: Event) -> None:
"""Start feed manager."""
feed.startup()
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, start_feed_manager) |
Set up the QR code image processing platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the QR code image processing platform."""
add_entities(
QrEntity(camera[CONF_ENTITY_ID], camera.get(CONF_NAME))
for camera in config[CONF_SOURCE]
) |
Validate the configuration and return a Quantum Gateway scanner. | def get_scanner(
hass: HomeAssistant, config: ConfigType
) -> QuantumGatewayDeviceScanner | None:
"""Validate the configuration and return a Quantum Gateway scanner."""
scanner = QuantumGatewayDeviceScanner(config[DOMAIN])
return scanner if scanner.success_init else None |
Set up the QVR Pro camera platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the QVR Pro camera platform."""
if discovery_info is None:
return
client = hass.data[DOMAIN]["client"]
entities = []
for channel in hass.data[DOMAIN]["channels"]:
stream_source = get_stream_source(channel["guid"], client)
entities.append(
QVRProCamera(**channel, stream_source=stream_source, client=client)
)
add_entities(entities) |
Get channel stream source. | def get_stream_source(guid, client):
"""Get channel stream source."""
try:
resp = client.get_channel_live_stream(guid, protocol="rtsp")
except QVRResponseError as ex:
_LOGGER.error(ex)
return None
full_url = resp["resourceUris"]
protocol = full_url[:7]
auth = f"{client.get_auth_string()}@"
url = full_url[7:]
return f"{protocol}{auth}{url}" |
Set up the QVR Pro component. | def setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the QVR Pro component."""
conf = config[DOMAIN]
user = conf[CONF_USERNAME]
password = conf[CONF_PASSWORD]
host = conf[CONF_HOST]
port = conf[CONF_PORT]
excluded_channels = conf[CONF_EXCLUDE_CHANNELS]
try:
qvrpro = Client(user, password, host, port=port)
channel_resp = qvrpro.get_channel_list()
except InsufficientPermissionsError:
_LOGGER.error("User must have Surveillance Management permission")
return False
except AuthenticationError:
_LOGGER.error("Authentication failed")
return False
except RequestsConnectionError:
_LOGGER.error("Error connecting to QVR server")
return False
channels = []
for channel in channel_resp["channels"]:
if channel["channel_index"] + 1 in excluded_channels:
continue
channels.append(channel)
hass.data[DOMAIN] = {"channels": channels, "client": qvrpro}
load_platform(hass, Platform.CAMERA, DOMAIN, {}, config)
# Register services
def handle_start_record(call: ServiceCall) -> None:
guid = call.data[SERVICE_CHANNEL_GUID]
qvrpro.start_recording(guid)
def handle_stop_record(call: ServiceCall) -> None:
guid = call.data[SERVICE_CHANNEL_GUID]
qvrpro.stop_recording(guid)
hass.services.register(
DOMAIN,
SERVICE_START_RECORD,
handle_start_record,
schema=SERVICE_CHANNEL_RECORD_SCHEMA,
)
hass.services.register(
DOMAIN,
SERVICE_STOP_RECORD,
handle_stop_record,
schema=SERVICE_CHANNEL_RECORD_SCHEMA,
)
return True |
Return whether an HTTP status code means invalid auth. | def is_invalid_auth_code(http_status_code: int) -> bool:
    """Return whether an HTTP status code means invalid auth."""
    return http_status_code in (HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN) |
Register a webhook. | def async_register_webhook(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Register a webhook."""
webhook_id: str = entry.data[CONF_WEBHOOK_ID]
async def _async_handle_rachio_webhook(
hass: HomeAssistant, webhook_id: str, request: web.Request
) -> web.Response:
"""Handle webhook calls from the server."""
person: RachioPerson = hass.data[DOMAIN][entry.entry_id]
data = await request.json()
try:
assert (
data.get(KEY_EXTERNAL_ID, "").split(":")[1]
== person.rachio.webhook_auth
)
except (AssertionError, IndexError):
return web.Response(status=web.HTTPForbidden.status_code)
update_type = data[KEY_TYPE]
if update_type in SIGNAL_MAP:
async_dispatcher_send(hass, SIGNAL_MAP[update_type], data)
return web.Response(status=web.HTTPNoContent.status_code)
webhook.async_register(
hass, DOMAIN, "Rachio", webhook_id, _async_handle_rachio_webhook
) |
Unregister a webhook. | def async_unregister_webhook(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Unregister a webhook."""
webhook_id: str = entry.data[CONF_WEBHOOK_ID]
webhook.async_unregister(hass, webhook_id) |
Return a RadarrEvent from an API event. | def _get_calendar_event(event: RadarrCalendarItem) -> RadarrEvent:
"""Return a RadarrEvent from an API event."""
_date, _type = event.releaseDateType()
return RadarrEvent(
summary=event.title,
start=_date - timedelta(days=1),
end=_date,
description=event.overview.replace(":", ";"),
release_type=_type,
) |
Get space. | def get_space(data: list[Diskspace], name: str) -> str:
"""Get space."""
space = [
mount.freeSpace / 1024 ** BYTE_SIZES.index(UnitOfInformation.GIGABYTES)
for mount in data
if name in mount.path
]
return f"{space[0]:.2f}" |
Return modified description and folder name. | def get_modified_description(
description: RadarrSensorEntityDescription[T], mount: RootFolder
) -> tuple[RadarrSensorEntityDescription[T], str]:
"""Return modified description and folder name."""
name = mount.path.rsplit("/")[-1].rsplit("\\")[-1]
desc = dataclasses.replace(
description,
key=f"{description.key}_{name}",
name=f"{description.name} {name}".capitalize(),
)
return desc, name |
Round a temperature to the resolution of the thermostat.
RadioThermostats can handle 0.5 degree temps so the input
temperature is rounded to that value and returned. | def round_temp(temperature):
"""Round a temperature to the resolution of the thermostat.
RadioThermostats can handle 0.5 degree temps so the input
temperature is rounded to that value and returned.
"""
return round(temperature * 2.0) / 2.0 |
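Doubling, rounding, and halving snaps a value to the nearest 0.5 degree; for example, using the function above:

print(round_temp(72.3))   # 72.5  (144.6 -> round -> 145 -> /2)
print(round_temp(72.24))  # 72.0  (144.48 -> round -> 144 -> /2)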
Set device time. | def _set_time(device: CommonThermostat) -> None:
"""Set device time."""
# Calling this clears any local temperature override and
# reverts to the scheduled temperature.
now = dt_util.now()
device.time = {
"day": now.weekday(),
"hour": now.hour,
"minute": now.minute,
} |
Create an aiohttp ClientSession for Rainbird with a connection limit. | def async_create_clientsession() -> aiohttp.ClientSession:
    """Create an aiohttp ClientSession for Rainbird with a connection limit."""
return aiohttp.ClientSession(
connector=aiohttp.TCPConnector(limit=CONECTION_LIMIT),
) |
Register cleanup hooks for the clientsession. | def _async_register_clientsession_shutdown(
hass: HomeAssistant, entry: ConfigEntry, clientsession: aiohttp.ClientSession
) -> None:
"""Register cleanup hooks for the clientsession."""
async def _async_close_websession(*_: Any) -> None:
"""Close websession."""
await clientsession.close()
unsub = hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_CLOSE, _async_close_websession
)
entry.async_on_unload(unsub)
entry.async_on_unload(_async_close_websession) |
Migrate existing entity if current one can't be found and an old one exists. | def _async_fix_entity_unique_id(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
config_entry_id: str,
mac_address: str,
serial_number: str,
) -> None:
"""Migrate existing entity if current one can't be found and an old one exists."""
entity_entries = er.async_entries_for_config_entry(entity_registry, config_entry_id)
for entity_entry in entity_entries:
unique_id = str(entity_entry.unique_id)
if unique_id.startswith(mac_address):
continue
if (suffix := unique_id.removeprefix(str(serial_number))) != unique_id:
new_unique_id = f"{mac_address}{suffix}"
_LOGGER.debug("Updating unique id from %s to %s", unique_id, new_unique_id)
entity_registry.async_update_entity(
entity_entry.entity_id, new_unique_id=new_unique_id
) |
Determine which device entry to keep when there are duplicates.
As we transitioned to new unique ids, we did not update existing device entries
and as a result there are devices with both the old and new unique id format. We
have to pick which one to keep, and preferably this can repair things if the
user previously renamed devices. | def _async_device_entry_to_keep(
old_entry: dr.DeviceEntry, new_entry: dr.DeviceEntry
) -> dr.DeviceEntry:
"""Determine which device entry to keep when there are duplicates.
As we transitioned to new unique ids, we did not update existing device entries
and as a result there are devices with both the old and new unique id format. We
have to pick which one to keep, and preferably this can repair things if the
user previously renamed devices.
"""
# Prefer the new device if the user already gave it a name or area. Otherwise,
# do the same for the old entry. If no entries have been modified then keep the new one.
if new_entry.disabled_by is None and (
new_entry.area_id is not None or new_entry.name_by_user is not None
):
return new_entry
if old_entry.disabled_by is None and (
old_entry.area_id is not None or old_entry.name_by_user is not None
):
return old_entry
return new_entry if new_entry.disabled_by is None else old_entry |
Migrate existing device identifiers to the new format.
This will rename any device ids that are prefixed with the serial number to be prefixed
with the mac address. This also cleans up from a bug that allowed devices to exist
in both the old and new format. | def _async_fix_device_id(
hass: HomeAssistant,
device_registry: dr.DeviceRegistry,
config_entry_id: str,
mac_address: str,
serial_number: str,
) -> None:
"""Migrate existing device identifiers to the new format.
This will rename any device ids that are prefixed with the serial number to be prefixed
with the mac address. This also cleans up from a bug that allowed devices to exist
in both the old and new format.
"""
device_entries = dr.async_entries_for_config_entry(device_registry, config_entry_id)
device_entry_map = {}
migrations = {}
for device_entry in device_entries:
unique_id = str(next(iter(device_entry.identifiers))[1])
device_entry_map[unique_id] = device_entry
if (suffix := unique_id.removeprefix(str(serial_number))) != unique_id:
migrations[unique_id] = f"{mac_address}{suffix}"
for unique_id, new_unique_id in migrations.items():
old_entry = device_entry_map[unique_id]
if (new_entry := device_entry_map.get(new_unique_id)) is not None:
# Device entries exist for both the old and new format and one must be removed
entry_to_keep = _async_device_entry_to_keep(old_entry, new_entry)
if entry_to_keep == new_entry:
_LOGGER.debug("Removing device entry %s", unique_id)
device_registry.async_remove_device(old_entry.id)
continue
# Remove new entry and update old entry to new id below
_LOGGER.debug("Removing device entry %s", new_unique_id)
device_registry.async_remove_device(new_entry.id)
_LOGGER.debug("Updating device id from %s to %s", unique_id, new_unique_id)
device_registry.async_update_device(
old_entry.id, new_identifiers={(DOMAIN, new_unique_id)}
) |
Set up a sensor for a raincloud device. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up a sensor for a raincloud device."""
raincloud = hass.data[DATA_RAINCLOUD].data
sensors = []
for sensor_type in config[CONF_MONITORED_CONDITIONS]:
if sensor_type == "status":
sensors.append(RainCloudBinarySensor(raincloud.controller, sensor_type))
sensors.append(
RainCloudBinarySensor(raincloud.controller.faucet, sensor_type)
)
else:
# create a sensor for each zone managed by faucet
sensors.extend(
RainCloudBinarySensor(zone, sensor_type)
for zone in raincloud.controller.faucet.zones
)
add_entities(sensors, True) |
Set up a sensor for a raincloud device. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up a sensor for a raincloud device."""
raincloud = hass.data[DATA_RAINCLOUD].data
sensors = []
for sensor_type in config[CONF_MONITORED_CONDITIONS]:
if sensor_type == "battery":
sensors.append(RainCloudSensor(raincloud.controller.faucet, sensor_type))
else:
# create a sensor for each zone managed by a faucet
sensors.extend(
RainCloudSensor(zone, sensor_type)
for zone in raincloud.controller.faucet.zones
)
add_entities(sensors, True) |
Set up a sensor for a raincloud device. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up a sensor for a raincloud device."""
raincloud = hass.data[DATA_RAINCLOUD].data
default_watering_timer = config[CONF_WATERING_TIME]
add_entities(
(
RainCloudSwitch(default_watering_timer, zone, sensor_type)
for zone in raincloud.controller.faucet.zones
for sensor_type in config[CONF_MONITORED_CONDITIONS]
),
True,
) |
Set up the Melnor RainCloud component. | def setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Melnor RainCloud component."""
conf = config[DOMAIN]
username = conf.get(CONF_USERNAME)
password = conf.get(CONF_PASSWORD)
scan_interval = conf.get(CONF_SCAN_INTERVAL)
try:
raincloud = RainCloudy(username=username, password=password)
if not raincloud.is_connected:
raise HTTPError
hass.data[DATA_RAINCLOUD] = RainCloudHub(raincloud)
except (ConnectTimeout, HTTPError) as ex:
_LOGGER.error("Unable to connect to Rain Cloud service: %s", str(ex))
persistent_notification.create(
hass,
f"Error: {ex}<br />You will need to restart hass after fixing.",
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID,
)
return False
def hub_refresh(event_time):
"""Call Raincloud hub to refresh information."""
_LOGGER.debug("Updating RainCloud Hub component")
hass.data[DATA_RAINCLOUD].data.update()
dispatcher_send(hass, SIGNAL_UPDATE_RAINCLOUD)
# Call the Raincloud API to refresh updates
track_time_interval(hass, hub_refresh, scan_interval)
return True |
Create user schema with passed in defaults if available. | def create_schema(user_input: dict[str, Any] | None) -> vol.Schema:
"""Create user schema with passed in defaults if available."""
if user_input is None:
user_input = {}
return vol.Schema(
{
vol.Required(CONF_HOST, default=user_input.get(CONF_HOST)): str,
vol.Required(CONF_CLOUD_ID, default=user_input.get(CONF_CLOUD_ID)): str,
vol.Required(
CONF_INSTALL_CODE, default=user_input.get(CONF_INSTALL_CODE)
): str,
}
) |
Generate unique id from usb attributes. | def _generate_unique_id(info: ListPortInfo | usb.UsbServiceInfo) -> str:
"""Generate unique id from usb attributes."""
return (
f"{_format_id(info.vid)}:{_format_id(info.pid)}_{info.serial_number}"
f"_{info.manufacturer}_{info.description}"
) |
Redact meter MAC addresses from mapping keys. | def async_redact_meter_macs(data: dict) -> dict:
"""Redact meter MAC addresses from mapping keys."""
if not data.get("Meters"):
return data
redacted = {**data, "Meters": {}}
for idx, mac_id in enumerate(data["Meters"]):
redacted["Meters"][f"**REDACTED{idx}**"] = data["Meters"][mac_id]
return redacted |
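A self-contained run with fake MAC keys (the sample data is invented here) shows the output shape:

def async_redact_meter_macs(data: dict) -> dict:
    if not data.get("Meters"):
        return data
    # Copy the mapping, replacing each MAC key with an indexed placeholder.
    redacted = {**data, "Meters": {}}
    for idx, mac_id in enumerate(data["Meters"]):
        redacted["Meters"][f"**REDACTED{idx}**"] = data["Meters"][mac_id]
    return redacted

sample = {"Meters": {"aa:bb:cc": {"power": 1.2}, "dd:ee:ff": {"power": 0.4}}}
print(async_redact_meter_macs(sample))
# {'Meters': {'**REDACTED0**': {'power': 1.2}, '**REDACTED1**': {'power': 0.4}}}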
Return the first local controller. | def get_client_controller(client: Client) -> Controller:
"""Return the first local controller."""
return next(iter(client.controllers.values())) |
Define a decorator to raise on a request error. | def raise_on_request_error(
func: Callable[Concatenate[_T, _P], Awaitable[None]],
) -> Callable[Concatenate[_T, _P], Coroutine[Any, Any, None]]:
"""Define a decorator to raise on a request error."""
async def decorator(self: _T, *args: _P.args, **kwargs: _P.kwargs) -> None:
"""Decorate."""
try:
await func(self, *args, **kwargs)
except RainMachineError as err:
raise HomeAssistantError(
f"Error while executing {func.__name__}: {err}",
) from err
return decorator |
Remove old entities and create a repairs issue with info on their replacement. | def async_finish_entity_domain_replacements(
hass: HomeAssistant,
entry: ConfigEntry,
entity_replacement_strategies: Iterable[EntityDomainReplacementStrategy],
) -> None:
"""Remove old entities and create a repairs issue with info on their replacement."""
ent_reg = er.async_get(hass)
for strategy in entity_replacement_strategies:
try:
[registry_entry] = [
registry_entry
for registry_entry in ent_reg.entities.get_entries_for_config_entry_id(
entry.entry_id
)
if registry_entry.domain == strategy.old_domain
and registry_entry.unique_id == strategy.old_unique_id
]
except ValueError:
continue
old_entity_id = registry_entry.entity_id
if strategy.remove_old_entity:
LOGGER.info('Removing old entity: "%s"', old_entity_id)
ent_reg.async_remove(old_entity_id) |
Return whether a key exists in a nested dict. | def key_exists(data: dict[str, Any], search_key: str) -> bool:
    """Return whether a key exists in a nested dict."""
    for key, value in data.items():
        if key == search_key:
            return True
        # Recurse into nested dicts, but keep scanning the remaining siblings
        # when the key isn't found in this branch.
        if isinstance(value, dict) and key_exists(value, search_key):
            return True
    return False |
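With the sibling-scan behavior above, keys that follow a nested dict are still found:

print(key_exists({"a": {"x": 1}, "b": 2}, "b"))  # True
print(key_exists({"a": {"x": 1}}, "x"))          # True
print(key_exists({"a": {"x": 1}}, "y"))          # False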
Get the controller related to a service call (by device ID). | def async_get_entry_for_service_call(
hass: HomeAssistant, call: ServiceCall
) -> ConfigEntry:
"""Get the controller related to a service call (by device ID)."""
device_id = call.data[CONF_DEVICE_ID]
device_registry = dr.async_get(hass)
if (device_entry := device_registry.async_get(device_id)) is None:
raise ValueError(f"Invalid RainMachine device ID: {device_id}")
for entry_id in device_entry.config_entries:
if (entry := hass.config_entries.async_get_entry(entry_id)) is None:
continue
if entry.domain == DOMAIN:
return entry
raise ValueError(f"No controller for device ID: {device_id}") |
Generate schema. | def _generate_schema(domain: str, flow_type: _FlowType) -> vol.Schema:
"""Generate schema."""
schema: dict[vol.Marker, Any] = {}
if flow_type == _FlowType.CONFIG:
schema[vol.Required(CONF_NAME)] = TextSelector()
if domain == Platform.BINARY_SENSOR:
schema[vol.Optional(CONF_DEVICE_CLASS)] = SelectSelector(
SelectSelectorConfig(
options=[cls.value for cls in BinarySensorDeviceClass],
sort=True,
mode=SelectSelectorMode.DROPDOWN,
translation_key="binary_sensor_device_class",
),
)
if domain == Platform.SENSOR:
schema.update(
{
vol.Optional(CONF_MINIMUM, default=DEFAULT_MIN): cv.positive_int,
vol.Optional(CONF_MAXIMUM, default=DEFAULT_MAX): cv.positive_int,
vol.Optional(CONF_DEVICE_CLASS): SelectSelector(
SelectSelectorConfig(
options=[
cls.value
for cls in SensorDeviceClass
if cls != SensorDeviceClass.ENUM
],
sort=True,
mode=SelectSelectorMode.DROPDOWN,
translation_key="sensor_device_class",
),
),
vol.Optional(CONF_UNIT_OF_MEASUREMENT): SelectSelector(
SelectSelectorConfig(
options=[
str(unit)
for units in DEVICE_CLASS_UNITS.values()
for unit in units
if unit is not None
],
sort=True,
mode=SelectSelectorMode.DROPDOWN,
translation_key="sensor_unit_of_measurement",
custom_value=True,
),
),
}
)
return vol.Schema(schema) |
Validate unit of measurement. | def _validate_unit(options: dict[str, Any]) -> None:
"""Validate unit of measurement."""
if (
(device_class := options.get(CONF_DEVICE_CLASS))
and (units := DEVICE_CLASS_UNITS.get(device_class))
and (unit := options.get(CONF_UNIT_OF_MEASUREMENT)) not in units
):
sorted_units = sorted(
[f"'{str(unit)}'" if unit else "no unit of measurement" for unit in units],
key=str.casefold,
)
if len(sorted_units) == 1:
units_string = sorted_units[0]
else:
units_string = f"one of {', '.join(sorted_units)}"
raise vol.Invalid(
f"'{unit}' is not a valid unit for device class '{device_class}'; "
f"expected {units_string}"
) |
Do post validation of user input.
For sensors: Validate unit of measurement. | def validate_user_input(
template_type: str,
) -> Callable[
[SchemaCommonFlowHandler, dict[str, Any]],
Coroutine[Any, Any, dict[str, Any]],
]:
"""Do post validation of user input.
For sensors: Validate unit of measurement.
"""
async def _validate_user_input(
_: SchemaCommonFlowHandler,
user_input: dict[str, Any],
) -> dict[str, Any]:
"""Add template type to user input."""
if template_type == Platform.SENSOR:
_validate_unit(user_input)
return {"entity_type": template_type} | user_input
return _validate_user_input |
Convert a device key to an entity key. | def _device_key_to_bluetooth_entity_key(
device_key: DeviceKey,
) -> PassiveBluetoothEntityKey:
"""Convert a device key to an entity key."""
return PassiveBluetoothEntityKey(device_key.key, device_key.device_id) |
Convert a sensor update to a bluetooth data update. | def sensor_update_to_bluetooth_data_update(
sensor_update: SensorUpdate,
) -> PassiveBluetoothDataUpdate:
"""Convert a sensor update to a bluetooth data update."""
return PassiveBluetoothDataUpdate(
devices={
device_id: sensor_device_info_to_hass_device_info(device_info)
for device_id, device_info in sensor_update.devices.items()
},
entity_descriptions={
_device_key_to_bluetooth_entity_key(device_key): SENSOR_DESCRIPTIONS[
(description.device_class, description.native_unit_of_measurement)
]
for device_key, description in sensor_update.entity_descriptions.items()
if description.device_class and description.native_unit_of_measurement
},
entity_data={
_device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
for device_key, sensor_values in sensor_update.entity_values.items()
},
entity_names={
_device_key_to_bluetooth_entity_key(device_key): sensor_values.name
for device_key, sensor_values in sensor_update.entity_values.items()
},
) |
Return board info. | def async_info(hass: HomeAssistant) -> list[HardwareInfo]:
"""Return board info."""
if (os_info := get_os_info(hass)) is None:
raise HomeAssistantError
board: str | None
if (board := os_info.get("board")) is None:
raise HomeAssistantError
if not board.startswith("rpi"):
raise HomeAssistantError
config_entries = [
entry.entry_id for entry in hass.config_entries.async_entries(DOMAIN)
]
return [
HardwareInfo(
board=BoardInfo(
hassio_board_id=board,
manufacturer=DOMAIN,
model=MODELS.get(board),
revision=None,
),
config_entries=config_entries,
dongle=None,
name=BOARD_NAMES.get(board, f"Unknown Raspberry Pi model '{board}'"),
url=None,
)
] |
Set up the RaspyRFM switch. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the RaspyRFM switch."""
gateway_manufacturer = config.get(
CONF_GATEWAY_MANUFACTURER, Manufacturer.SEEGEL_SYSTEME.value
)
gateway_model = config.get(CONF_GATEWAY_MODEL, GatewayModel.RASPYRFM.value)
host = config[CONF_HOST]
port = config.get(CONF_PORT)
switches = config[CONF_SWITCHES]
raspyrfm_client = RaspyRFMClient()
gateway = raspyrfm_client.get_gateway(
Manufacturer(gateway_manufacturer), GatewayModel(gateway_model), host, port
)
switch_entities = []
for switch in switches:
name = switch[CONF_NAME]
controlunit_manufacturer = switch[CONF_CONTROLUNIT_MANUFACTURER]
controlunit_model = switch[CONF_CONTROLUNIT_MODEL]
channel_config = switch[CONF_CHANNEL_CONFIG]
controlunit = raspyrfm_client.get_controlunit(
Manufacturer(controlunit_manufacturer), ControlUnitModel(controlunit_model)
)
controlunit.set_channel_config(**channel_config)
switch = RaspyRFMSwitch(raspyrfm_client, name, gateway, controlunit)
switch_entities.append(switch)
add_entities(switch_entities) |
Get a HASS CalendarEvent from an aiorecollect PickupEvent. | def async_get_calendar_event_from_pickup_event(
entry: ConfigEntry, pickup_event: PickupEvent
) -> CalendarEvent:
"""Get a HASS CalendarEvent from an aiorecollect PickupEvent."""
pickup_type_string = ", ".join(
async_get_pickup_type_names(entry, pickup_event.pickup_types)
)
return CalendarEvent(
summary="ReCollect Waste Pickup",
description=f"Pickup types: {pickup_type_string}",
location=pickup_event.area_name,
start=pickup_event.date,
end=pickup_event.date + datetime.timedelta(days=1),
) |
Return proper pickup type names from their associated objects. | def async_get_pickup_type_names(
entry: ConfigEntry, pickup_types: list[PickupType]
) -> list[str]:
"""Return proper pickup type names from their associated objects."""
return [
t.friendly_name
if entry.options.get(CONF_FRIENDLY_NAME) and t.friendly_name
else t.name
for t in pickup_types
] |
Compile UnusedDateTime and Unused as CHAR(0) on mysql, mariadb, and sqlite. | def compile_char_zero(type_: TypeDecorator, compiler: Any, **kw: Any) -> str:
"""Compile UnusedDateTime and Unused as CHAR(0) on mysql, mariadb, and sqlite."""
return "CHAR(0)" |
Compile Unused as CHAR(1) on postgresql. | def compile_char_one(type_: TypeDecorator, compiler: Any, **kw: Any) -> str:
"""Compile Unused as CHAR(1) on postgresql."""
return "CHAR(1)" |
Set up the entity hooks. | def async_setup(hass: HomeAssistant) -> None:
"""Set up the entity hooks."""
@callback
def _async_entity_id_changed(
event: Event[er.EventEntityRegistryUpdatedData],
) -> None:
instance = get_instance(hass)
if TYPE_CHECKING:
assert event.data["action"] == "update" and "old_entity_id" in event.data
old_entity_id = event.data["old_entity_id"]
new_entity_id = event.data["entity_id"]
instance.async_update_statistics_metadata(
old_entity_id, new_statistic_id=new_entity_id
)
instance.async_update_states_metadata(
old_entity_id, new_entity_id=new_entity_id
)
@callback
def entity_registry_changed_filter(
event_data: er.EventEntityRegistryUpdatedData,
) -> bool:
"""Handle entity_id changed filter."""
return event_data["action"] == "update" and "old_entity_id" in event_data
@callback
def _setup_entity_registry_event_handler(hass: HomeAssistant) -> None:
"""Subscribe to event registry events."""
hass.bus.async_listen(
er.EVENT_ENTITY_REGISTRY_UPDATED,
_async_entity_id_changed,
event_filter=entity_registry_changed_filter,
)
async_at_start(hass, _setup_entity_registry_event_handler) |
Update the states metadata table when an entity is renamed. | def update_states_metadata(
instance: Recorder,
entity_id: str,
new_entity_id: str,
) -> None:
"""Update the states metadata table when an entity is renamed."""
states_meta_manager = instance.states_meta_manager
if not states_meta_manager.active:
_LOGGER.warning(
"Cannot rename entity_id `%s` to `%s` "
"because the states meta manager is not yet active",
entity_id,
new_entity_id,
)
return
with session_scope(
session=instance.get_session(),
exception_filter=filter_unique_constraint_integrity_error(instance, "state"),
) as session:
if not states_meta_manager.update_metadata(session, entity_id, new_entity_id):
_LOGGER.warning(
"Cannot migrate history for entity_id `%s` to `%s` "
"because the new entity_id is already in use",
entity_id,
new_entity_id,
) |
Create a worker that calls a function after it has finished. | def _worker_with_shutdown_hook(
shutdown_hook: Callable[[], None], *args: Any, **kwargs: Any
) -> None:
"""Create a worker that calls a function after its finished."""
_worker(*args, **kwargs)
shutdown_hook() |
Extract an include exclude filter from configuration.
This makes a copy so we do not alter the original data. | def extract_include_exclude_filter_conf(conf: ConfigType) -> dict[str, Any]:
"""Extract an include exclude filter from configuration.
This makes a copy so we do not alter the original data.
"""
return {
filter_type: {
matcher: set(conf.get(filter_type, {}).get(matcher) or [])
for matcher in FITLER_MATCHERS
}
for filter_type in FILTER_TYPES
} |
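Assuming FILTER_TYPES is ("include", "exclude") and FITLER_MATCHERS holds the usual matcher keys (both constant values are assumptions here, not confirmed from the source), the extraction yields empty sets for any matcher the config omits:

FILTER_TYPES = ("include", "exclude")
FITLER_MATCHERS = ("domains", "entities", "entity_globs")

def extract_include_exclude_filter_conf(conf):
    # Copy every matcher list into a fresh set so the original config is untouched.
    return {
        filter_type: {
            matcher: set(conf.get(filter_type, {}).get(matcher) or [])
            for matcher in FITLER_MATCHERS
        }
        for filter_type in FILTER_TYPES
    }

conf = {"include": {"domains": ["light"]}}
print(extract_include_exclude_filter_conf(conf))
# {'include': {'domains': {'light'}, 'entities': set(), 'entity_globs': set()},
#  'exclude': {'domains': set(), 'entities': set(), 'entity_globs': set()}}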