response
stringlengths 1
33.1k
| instruction
stringlengths 22
582k
|
---|---|
def get_resource(domain_name: str, domain_data: ConfigType) -> str:
    """Return the resource for the specified domain_data.

    Raises ValueError for an unknown domain.
    """
    # Domains whose resource is a single key of domain_data.
    simple_keys = {
        "switch": "output",
        "light": "output",
        "binary_sensor": "source",
        "sensor": "source",
        "cover": "motor",
    }
    if (key := simple_keys.get(domain_name)) is not None:
        return cast(str, domain_data[key])
    # Composite resources are built from two keys.
    if domain_name == "climate":
        return f'{domain_data["source"]}.{domain_data["setpoint"]}'
    if domain_name == "scene":
        return f'{domain_data["register"]}.{domain_data["scene"]}'
    raise ValueError("Unknown domain")
def get_device_model(domain_name: str, domain_data: ConfigType) -> str:
    """Return the model for the specified domain_data.

    Raises ValueError for an unknown domain.
    """
    if domain_name in ("switch", "light"):
        port = domain_data[CONF_OUTPUT]
        return "Output" if port in OUTPUT_PORTS else "Relay"
    if domain_name in ("binary_sensor", "sensor"):
        source = domain_data[CONF_SOURCE]
        if source in BINSENSOR_PORTS:
            return "Binary Sensor"
        if source in chain(VARIABLES, SETPOINTS, THRESHOLDS, S0_INPUTS):
            return "Variable"
        if source in LED_PORTS:
            return "Led"
        return "Logical Operation" if source in LOGICOP_PORTS else "Key"
    # Remaining domains map to a fixed model name.
    fixed_models = {"cover": "Motor", "climate": "Regulator", "scene": "Scene"}
    if domain_name in fixed_models:
        return fixed_models[domain_name]
    raise ValueError("Unknown domain")
def generate_unique_id(
    entry_id: str,
    address: AddressType,
    resource: str | None = None,
) -> str:
    """Generate a unique_id from the given parameters.

    The id has the form "<entry_id>-<m|g><seg:03d><addr:03d>[-<resource>]",
    with "g" for group addresses and "m" for module addresses.
    """
    seg_id, addr_id, is_group = address
    type_char = "g" if is_group else "m"
    parts = [entry_id, f"-{type_char}{seg_id:03d}{addr_id:03d}"]
    if resource:
        # Resource part (including its dash) is lowercased.
        parts.append(f"-{resource}".lower())
    return "".join(parts)
Convert lcn settings from configuration.yaml to config_entries data.
Create a list of config_entry data structures like:
"data": {
"host": "pchk",
"ip_address": "192.168.2.41",
"port": 4114,
"username": "lcn",
"password": "lcn,
"sk_num_tries: 0,
"dim_mode: "STEPS200",
"devices": [
{
"address": (0, 7, False)
"name": "",
"hardware_serial": -1,
"software_serial": -1,
"hardware_type": -1
}, ...
],
"entities": [
{
"address": (0, 7, False)
"name": "Light_Output1",
"resource": "output1",
"domain": "light",
"domain_data": {
"output": "OUTPUT1",
"dimmable": True,
"transition": 5000.0
}
}, ...
]
def import_lcn_config(lcn_config: ConfigType) -> list[ConfigType]:
    """Convert lcn settings from configuration.yaml to config_entries data.

    Create a list of config_entry data structures like:

    "data": {
        "host": "pchk",
        "ip_address": "192.168.2.41",
        "port": 4114,
        "username": "lcn",
        "password": "lcn",
        "sk_num_tries": 0,
        "dim_mode": "STEPS200",
        "devices": [
            {
                "address": (0, 7, False),
                "name": "",
                "hardware_serial": -1,
                "software_serial": -1,
                "hardware_type": -1
            }, ...
        ],
        "entities": [
            {
                "address": (0, 7, False),
                "name": "Light_Output1",
                "resource": "output1",
                "domain": "light",
                "domain_data": {
                    "output": "OUTPUT1",
                    "dimmable": True,
                    "transition": 5000.0
                }
            }, ...
        ]
    }
    """
    # One config_entry data dict per configured PCHK connection, keyed by
    # its connection name.
    data = {}
    for connection in lcn_config[CONF_CONNECTIONS]:
        host = {
            CONF_HOST: connection[CONF_NAME],
            CONF_IP_ADDRESS: connection[CONF_HOST],
            CONF_PORT: connection[CONF_PORT],
            CONF_USERNAME: connection[CONF_USERNAME],
            CONF_PASSWORD: connection[CONF_PASSWORD],
            CONF_SK_NUM_TRIES: connection[CONF_SK_NUM_TRIES],
            CONF_DIM_MODE: connection[CONF_DIM_MODE],
            CONF_DEVICES: [],
            CONF_ENTITIES: [],
        }
        data[connection[CONF_NAME]] = host
    # All other top-level config keys are platform configs (light, switch, ...).
    for confkey, domain_config in lcn_config.items():
        if confkey == CONF_CONNECTIONS:
            continue
        domain = DOMAIN_LOOKUP[confkey]
        # loop over entities in configuration.yaml
        for domain_data in domain_config:
            # remove name and address from domain_data
            # NOTE: .pop() mutates the yaml-derived dict; a copy of the
            # remainder is stored below as CONF_DOMAIN_DATA.
            entity_name = domain_data.pop(CONF_NAME)
            address, host_name = domain_data.pop(CONF_ADDRESS)
            if host_name is None:
                host_name = DEFAULT_NAME
            # check if we have a new device config
            # (for/else: only create the device when no existing one matched)
            for device_config in data[host_name][CONF_DEVICES]:
                if address == device_config[CONF_ADDRESS]:
                    break
            else:  # create new device_config
                # Serials/hardware type are unknown at import time (-1).
                device_config = {
                    CONF_ADDRESS: address,
                    CONF_NAME: "",
                    CONF_HARDWARE_SERIAL: -1,
                    CONF_SOFTWARE_SERIAL: -1,
                    CONF_HARDWARE_TYPE: -1,
                }
                data[host_name][CONF_DEVICES].append(device_config)
            # insert entity config
            resource = get_resource(domain, domain_data).lower()
            # Deduplicate on (address, resource, domain); first entity wins.
            for entity_config in data[host_name][CONF_ENTITIES]:
                if (
                    address == entity_config[CONF_ADDRESS]
                    and resource == entity_config[CONF_RESOURCE]
                    and domain == entity_config[CONF_DOMAIN]
                ):
                    break
            else:  # create new entity_config
                entity_config = {
                    CONF_ADDRESS: address,
                    CONF_NAME: entity_name,
                    CONF_RESOURCE: resource,
                    CONF_DOMAIN: domain,
                    CONF_DOMAIN_DATA: domain_data.copy(),
                }
                data[host_name][CONF_ENTITIES].append(entity_config)
    return list(data.values())
def purge_entity_registry(
    hass: HomeAssistant, entry_id: str, imported_entry_data: ConfigType
) -> None:
    """Remove orphans from entity registry which are not in entry data."""
    entity_registry = er.async_get(hass)
    # Find all entities that are referenced in the config entry.
    references_config_entry = {
        entity_entry.entity_id
        for entity_entry in er.async_entries_for_config_entry(entity_registry, entry_id)
    }
    # Find all entities that are referenced by the entry_data.
    references_entry_data = set()
    for entity_data in imported_entry_data[CONF_ENTITIES]:
        # Rebuild the unique_id the entity would have been registered under.
        entity_unique_id = generate_unique_id(
            entry_id, entity_data[CONF_ADDRESS], entity_data[CONF_RESOURCE]
        )
        entity_id = entity_registry.async_get_entity_id(
            entity_data[CONF_DOMAIN], DOMAIN, entity_unique_id
        )
        if entity_id is not None:
            references_entry_data.add(entity_id)
    # Everything registered for this entry but absent from entry_data is stale.
    orphaned_ids = references_config_entry - references_entry_data
    for orphaned_id in orphaned_ids:
        entity_registry.async_remove(orphaned_id)
def purge_device_registry(
    hass: HomeAssistant, entry_id: str, imported_entry_data: ConfigType
) -> None:
    """Remove orphans from device registry which are not in entry data."""
    device_registry = dr.async_get(hass)
    entity_registry = er.async_get(hass)
    # Find all devices that are referenced in the entity registry.
    references_entities = {
        entry.device_id
        for entry in entity_registry.entities.get_entries_for_config_entry_id(entry_id)
    }
    # Find device that references the host.
    # (The host/hub device is identified by the config entry id itself.)
    references_host = set()
    host_device = device_registry.async_get_device(identifiers={(DOMAIN, entry_id)})
    if host_device is not None:
        references_host.add(host_device.id)
    # Find all devices that are referenced by the entry_data.
    references_entry_data = set()
    for device_data in imported_entry_data[CONF_DEVICES]:
        device_unique_id = generate_unique_id(entry_id, device_data[CONF_ADDRESS])
        device = device_registry.async_get_device(
            identifiers={(DOMAIN, device_unique_id)}
        )
        if device is not None:
            references_entry_data.add(device.id)
    # A device is orphaned only if nothing references it: no entity, not the
    # host device, and not present in the imported entry data.
    orphaned_ids = (
        {
            entry.id
            for entry in dr.async_entries_for_config_entry(device_registry, entry_id)
        }
        - references_entities
        - references_host
        - references_entry_data
    )
    for device_id in orphaned_ids:
        device_registry.async_remove_device(device_id)
def register_lcn_host_device(hass: HomeAssistant, config_entry: ConfigEntry) -> None:
    """Register the LCN host (PCHK) of the given config_entry in the device registry."""
    registry = dr.async_get(hass)
    # The host device is identified solely by the config entry id.
    registry.async_get_or_create(
        config_entry_id=config_entry.entry_id,
        identifiers={(DOMAIN, config_entry.entry_id)},
        manufacturer="Issendorff",
        name=config_entry.title,
        model="LCN-PCHK",
    )
Register LCN modules and groups defined in config_entry as devices in device registry.
The name of all given device_connections is collected and the devices
def register_lcn_address_devices(
    hass: HomeAssistant, config_entry: ConfigEntry
) -> None:
    """Register LCN modules and groups defined in config_entry as devices in device registry.

    The name of all given device_connections is collected and the devices
    are updated. Each module/group is linked to the host device via
    ``via_device``.
    """
    device_registry = dr.async_get(hass)
    host_identifiers = (DOMAIN, config_entry.entry_id)
    for device_config in config_entry.data[CONF_DEVICES]:
        address = device_config[CONF_ADDRESS]
        device_name = device_config[CONF_NAME]
        identifiers = {(DOMAIN, generate_unique_id(config_entry.entry_id, address))}
        # Use the already-extracted `address` consistently (address[2] is the
        # group flag) instead of re-indexing device_config.
        if address[2]:  # is group
            device_model = f"LCN group (g{address[0]:03d}{address[1]:03d})"
            sw_version = None
        else:  # is module
            hardware_type = device_config[CONF_HARDWARE_TYPE]
            # Fall back to the generic description (key -1) for unknown
            # hardware types; .get() replaces the membership-test-then-index.
            hardware_name = pypck.lcn_defs.HARDWARE_DESCRIPTIONS.get(
                hardware_type, pypck.lcn_defs.HARDWARE_DESCRIPTIONS[-1]
            )
            device_model = f"{hardware_name} (m{address[0]:03d}{address[1]:03d})"
            sw_version = f"{device_config[CONF_SOFTWARE_SERIAL]:06X}"
        device_registry.async_get_or_create(
            config_entry_id=config_entry.entry_id,
            identifiers=identifiers,
            via_device=host_identifiers,
            manufacturer="Issendorff",
            sw_version=sw_version,
            name=device_name,
            model=device_model,
        )
Validate that all connection names are unique.
Use 'pchk' as default connection_name (or add a numeric suffix if
def has_unique_host_names(hosts: list[ConfigType]) -> list[ConfigType]:
    """Validate that all connection names are unique.

    Use 'pchk' as the default connection name (adding a numeric suffix
    if 'pchk' is already in use).
    """
    suffix = 0
    for host in hosts:
        if host.get(CONF_NAME) is None:
            host[CONF_NAME] = (
                DEFAULT_NAME if suffix == 0 else f"{DEFAULT_NAME}{suffix:d}"
            )
            suffix += 1
    # Raises if two hosts ended up with the same name.
    vol.Schema(vol.Unique())([host.get(CONF_NAME) for host in hosts])
    return hosts
Validate the given address string.
Examples for S000M005 at myhome:
myhome.s000.m005
myhome.s0.m5
myhome.0.5 ("m" is implicit if missing)
Examples for s000g011
myhome.0.g11
def is_address(value: str) -> tuple[AddressType, str]:
    """Validate the given address string.

    Examples for S000M005 at myhome:
        myhome.s000.m005
        myhome.s0.m5
        myhome.0.5 ("m" is implicit if missing)

    Examples for s000g011:
        myhome.0.g11
        myhome.s0.g11
    """
    match = PATTERN_ADDRESS.match(value)
    if match is None:
        raise ValueError(f"{value} is not a valid address string")
    # (segment id, module/group id, is_group flag)
    address = (
        int(match.group("seg_id")),
        int(match.group("id")),
        match.group("type") == "g",
    )
    return address, match.group("conn_id")
def is_states_string(states_string: str) -> list[str]:
    """Validate the given states string and return states list.

    The string must be exactly 8 characters, each one of
    "1" (ON), "0" (OFF), "T" (TOGGLE) or "-" (NOCHANGE).

    Raises ValueError for a wrong length or an invalid character
    (previously an invalid character leaked a bare KeyError, inconsistent
    with the length check).
    """
    if len(states_string) != 8:
        raise ValueError("Invalid length of states string")
    states = {"1": "ON", "0": "OFF", "T": "TOGGLE", "-": "NOCHANGE"}
    try:
        return [states[char] for char in states_string]
    except KeyError as err:
        raise ValueError(
            f"Invalid character in states string: {err.args[0]}"
        ) from err
def create_lcn_light_entity(
    hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry
) -> LcnEntity:
    """Set up an entity for this domain."""
    device_connection = get_device_connection(
        hass, entity_config[CONF_ADDRESS], config_entry
    )
    # Dimmable output ports get an output light; everything else is a relay.
    entity_class = (
        LcnOutputLight
        if entity_config[CONF_DOMAIN_DATA][CONF_OUTPUT] in OUTPUT_PORTS
        else LcnRelayLight  # in RELAY_PORTS
    )
    return entity_class(entity_config, config_entry.entry_id, device_connection)
def create_lcn_scene_entity(
    hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry
) -> LcnEntity:
    """Set up an entity for this domain."""
    address = entity_config[CONF_ADDRESS]
    device_connection = get_device_connection(hass, address, config_entry)
    return LcnScene(entity_config, config_entry.entry_id, device_connection)
def create_lcn_sensor_entity(
    hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry
) -> LcnEntity:
    """Set up an entity for this domain."""
    device_connection = get_device_connection(
        hass, entity_config[CONF_ADDRESS], config_entry
    )
    source = entity_config[CONF_DOMAIN_DATA][CONF_SOURCE]
    # Variable-like sources get a variable sensor.
    if source in chain(VARIABLES, SETPOINTS, THRESHOLDS, S0_INPUTS):
        return LcnVariableSensor(
            entity_config, config_entry.entry_id, device_connection
        )
    # source is in LED_PORTS + LOGICOP_PORTS
    return LcnLedLogicSensor(entity_config, config_entry.entry_id, device_connection)
def create_lcn_switch_entity(
    hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry
) -> LcnEntity:
    """Set up an entity for this domain."""
    device_connection = get_device_connection(
        hass, entity_config[CONF_ADDRESS], config_entry
    )
    # Output ports get an output switch; everything else is a relay switch.
    entity_class = (
        LcnOutputSwitch
        if entity_config[CONF_DOMAIN_DATA][CONF_OUTPUT] in OUTPUT_PORTS
        else LcnRelaySwitch  # in RELAY_PORTS
    )
    return entity_class(entity_config, config_entry.entry_id, device_connection)
def async_host_input_received(
    hass: HomeAssistant,
    config_entry: config_entries.ConfigEntry,
    device_registry: dr.DeviceRegistry,
    inp: pypck.inputs.Input,
) -> None:
    """Process received input object (command) from LCN bus."""
    # Only module inputs carry a source address that can map to a device.
    if not isinstance(inp, pypck.inputs.ModInput):
        return
    lcn_connection = hass.data[DOMAIN][config_entry.entry_id][CONNECTION]
    # Resolve the physical source address to its logical (segment-aware) form.
    logical_address = lcn_connection.physical_to_logical(inp.physical_source_addr)
    address = (
        logical_address.seg_id,
        logical_address.addr_id,
        logical_address.is_group,
    )
    identifiers = {(DOMAIN, generate_unique_id(config_entry.entry_id, address))}
    device = device_registry.async_get_device(identifiers=identifiers)
    # Ignore inputs from modules that are not registered as devices.
    if device is None:
        return
    # Dispatch to the matching event-firing helper; other input types are dropped.
    if isinstance(inp, pypck.inputs.ModStatusAccessControl):
        _async_fire_access_control_event(hass, device, address, inp)
    elif isinstance(inp, pypck.inputs.ModSendKeysHost):
        _async_fire_send_keys_event(hass, device, address, inp)
def _async_fire_access_control_event(
    hass: HomeAssistant, device: dr.DeviceEntry, address: AddressType, inp: InputType
) -> None:
    """Fire access control event (transponder, transmitter, fingerprint, codelock)."""
    event_data = {
        "segment_id": address[0],
        "module_id": address[1],
        "code": inp.code,
    }
    if device is not None:
        event_data[CONF_DEVICE_ID] = device.id
    # Transmitters additionally report level, key and action.
    if inp.periphery == pypck.lcn_defs.AccessControlPeriphery.TRANSMITTER:
        event_data["level"] = inp.level
        event_data["key"] = inp.key
        event_data["action"] = inp.action.value
    # Event name is derived from the periphery, e.g. "lcn_transponder".
    hass.bus.async_fire(f"lcn_{inp.periphery.value.lower()}", event_data)
def _async_fire_send_keys_event(
    hass: HomeAssistant, device: dr.DeviceEntry, address: AddressType, inp: InputType
) -> None:
    """Fire a send_keys event for every selected key of every sent table."""
    for table, action in enumerate(inp.actions):
        if action == pypck.lcn_defs.SendKeyCommand.DONTSEND:
            continue
        # Only keys flagged as selected produce an event.
        for key in (idx for idx, selected in enumerate(inp.keys) if selected):
            event_data = {
                "segment_id": address[0],
                "module_id": address[1],
                # Keys are numbered table-wise, 8 keys per table.
                "key": pypck.lcn_defs.Key(table * 8 + key).name.lower(),
                "action": action.name.lower(),
            }
            if device is not None:
                event_data[CONF_DEVICE_ID] = device.id
            hass.bus.async_fire("lcn_send_keys", event_data)
def device_key_to_bluetooth_entity_key(
    device_key: DeviceKey,
) -> PassiveBluetoothEntityKey:
    """Convert a device key to an entity key."""
    key = device_key.key
    device_id = device_key.device_id
    return PassiveBluetoothEntityKey(key, device_id)
def sensor_update_to_bluetooth_data_update(
    sensor_update: SensorUpdate,
) -> PassiveBluetoothDataUpdate:
    """Convert a sensor update to a bluetooth data update."""
    return PassiveBluetoothDataUpdate(
        # Map every advertised device to its HA device info.
        devices={
            device_id: sensor_device_info_to_hass_device_info(device_info)
            for device_id, device_info in sensor_update.devices.items()
        },
        # Look up static descriptions by (device_class, unit); entries missing
        # either attribute are silently skipped.
        entity_descriptions={
            device_key_to_bluetooth_entity_key(device_key): SENSOR_DESCRIPTIONS[
                (description.device_class, description.native_unit_of_measurement)
            ]
            for device_key, description in sensor_update.entity_descriptions.items()
            if description.device_class and description.native_unit_of_measurement
        },
        entity_data={
            device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
            for device_key, sensor_values in sensor_update.entity_values.items()
        },
        entity_names={
            device_key_to_bluetooth_entity_key(device_key): sensor_values.name
            for device_key, sensor_values in sensor_update.entity_values.items()
        },
    )
Get Device Entry from Device Registry by device ID.
def async_get_device_entry_by_device_id(
    hass: HomeAssistant, device_id: str
) -> DeviceEntry:
    """Get Device Entry from Device Registry by device ID.

    Raises ValueError if device ID is invalid.
    """
    device = dr.async_get(hass).async_get(device_id)
    if device is None:
        raise ValueError(f"Device {device_id} is not a valid {DOMAIN} device.")
    return device
def _get_trigger_platform(config: ConfigType) -> TriggerProtocol:
    """Return trigger platform.

    Raises ValueError for an unknown trigger platform.
    """
    # Platform string looks like "<domain>.<trigger_name>"; an absent dot
    # yields an empty trigger name, which is never a TRIGGERS key.
    _, _, platform_name = config[CONF_PLATFORM].partition(".")
    if platform_name not in TRIGGERS:
        raise ValueError(
            f"Unknown LG Netcast TV trigger platform {config[CONF_PLATFORM]}"
        )
    return cast(TriggerProtocol, TRIGGERS[platform_name])
def async_get_turn_on_trigger(device_id: str) -> dict[str, str]:
    """Return data for a turn on trigger."""
    keys = (CONF_PLATFORM, CONF_DEVICE_ID, CONF_DOMAIN, CONF_TYPE)
    values = ("device", device_id, DOMAIN, PLATFORM_TYPE)
    return dict(zip(keys, values, strict=True))
def test_connect(host, port):
    """LG Soundbar config flow test_connect."""
    # Single-slot queues: the temescal callback (library thread) fills them,
    # this thread blocks on .get() below until the device answers.
    uuid_q = Queue(maxsize=1)
    name_q = Queue(maxsize=1)

    def check_msg_response(response, msgs, attr):
        # True iff the response msg matches (single value or membership in a
        # list) AND its data payload contains the expected attribute.
        msg = response["msg"]
        if msg == msgs or msg in msgs:
            if "data" in response and attr in response["data"]:
                return True
            _LOGGER.debug(
                "[%s] msg did not contain expected attr [%s]: %s", msg, attr, response
            )
        return False

    def queue_add(attr_q, data):
        # Best effort: a duplicate response for an already-filled slot is dropped.
        try:
            attr_q.put_nowait(data)
        except Full:
            _LOGGER.debug("attempted to add [%s] to full queue", data)

    def msg_callback(response):
        if check_msg_response(response, ["MAC_INFO_DEV", "PRODUCT_INFO"], "s_uuid"):
            queue_add(uuid_q, response["data"]["s_uuid"])
        if check_msg_response(response, "SPK_LIST_VIEW_INFO", "s_user_name"):
            queue_add(name_q, response["data"]["s_user_name"])

    details = {}
    try:
        connection = temescal.temescal(host, port=port, callback=msg_callback)
        connection.get_info()
        connection.get_mac_info()
        # Only ask for product info if MAC_INFO_DEV did not already deliver a
        # uuid (racy by design: a late MAC_INFO_DEV answer is still accepted).
        if uuid_q.empty():
            connection.get_product_info()
        details["name"] = name_q.get(timeout=QUEUE_TIMEOUT)
        details["uuid"] = uuid_q.get(timeout=QUEUE_TIMEOUT)
    except Empty:
        # Device did not answer in time; return whatever was collected.
        pass
    except TimeoutError as err:
        raise ConnectionError(f"Connection timeout with server: {host}:{port}") from err
    except OSError as err:
        raise ConnectionError(f"Cannot resolve hostname: {host}") from err
    # NOTE(review): the temescal connection is not closed explicitly here —
    # presumably cleaned up by the library/GC; verify.
    return details
def get_space(data: list[LidarrRootFolder], name: str) -> str:
    """Return free space (in GB, 2 decimals) of the first mount whose path contains name.

    Returns "0.00" when no mount matches (previously this raised IndexError).
    Inaccessible mounts report 0. The input mounts are no longer mutated.
    """
    # Loop-invariant: bytes-per-gigabyte divisor, hoisted out of the loop.
    gb_divisor = 1024 ** BYTE_SIZES.index(UnitOfInformation.GIGABYTES)
    for mount in data:
        if name in mount.path:
            free_bytes = mount.freeSpace if mount.accessible else 0
            return f"{free_bytes / gb_divisor:.2f}"
    return "0.00"
def get_modified_description(
    description: LidarrSensorEntityDescription[T], mount: LidarrRootFolder
) -> tuple[LidarrSensorEntityDescription[T], str]:
    """Return modified description and folder name."""
    # Last path component, handling both "/" and "\" separators.
    folder = mount.path.rsplit("/")[-1].rsplit("\\")[-1]
    new_key = f"{description.key}_{folder}"
    new_name = f"{description.name} {folder}".capitalize()
    modified = dataclasses.replace(description, key=new_key, name=new_name)
    return modified, folder
def queue_str(item: LidarrQueueItem) -> str:
    """Return string description of queue item."""
    # Stalled: bytes remain but no time estimate ("00:00:00").
    stalled = item.sizeleft > 0 and item.timeleft == "00:00:00"
    if stalled or not hasattr(item, "trackedDownloadState"):
        return "stopped"
    return item.trackedDownloadState
def async_init_discovery_flow(hass: HomeAssistant, host: str, serial: str) -> None:
    """Start discovery of devices."""
    context = {"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}
    data = {CONF_HOST: host, CONF_SERIAL: serial}
    discovery_flow.async_create_flow(hass, DOMAIN, context=context, data=data)
def async_trigger_discovery(
    hass: HomeAssistant,
    discovered_devices: Iterable[Light],
) -> None:
    """Trigger config flows for discovered devices."""
    for light in discovered_devices:
        # Light.mac_addr is not the mac_address, it is the serial number.
        async_init_discovery_flow(hass, light.ip_addr, light.mac_addr)
def async_migrate_legacy_entries(
    hass: HomeAssistant,
    discovered_hosts_by_serial: dict[str, str],
    existing_serials: set[str],
    legacy_entry: ConfigEntry,
) -> int:
    """Migrate the legacy config entries to have an entry per device.

    Returns the number of devices still attached to the legacy entry.
    """
    _LOGGER.debug(
        "Migrating legacy entries: discovered_hosts_by_serial=%s, existing_serials=%s",
        discovered_hosts_by_serial,
        existing_serials,
    )
    device_registry = dr.async_get(hass)
    for dev_entry in dr.async_entries_for_config_entry(
        device_registry, legacy_entry.entry_id
    ):
        for domain, serial in dev_entry.identifiers:
            # Start a discovery flow only for devices of this integration that
            # have no per-device entry yet and were seen on the network.
            if (
                domain == DOMAIN
                and serial not in existing_serials
                and (host := discovered_hosts_by_serial.get(serial))
            ):
                async_init_discovery_flow(hass, host, serial)
    # Re-query the registry: flows above may have moved devices off the
    # legacy entry already.
    remaining_devices = dr.async_entries_for_config_entry(
        dr.async_get(hass), legacy_entry.entry_id
    )
    _LOGGER.debug("The following devices remain: %s", remaining_devices)
    return len(remaining_devices)
def async_migrate_entities_devices(
    hass: HomeAssistant, legacy_entry_id: str, new_entry: ConfigEntry
) -> None:
    """Move entities and devices to the new config entry."""
    migrated_devices = []
    device_registry = dr.async_get(hass)
    for dev_entry in dr.async_entries_for_config_entry(
        device_registry, legacy_entry_id
    ):
        # A device belongs to the new entry when one of its identifiers
        # matches the new entry's unique_id (the device serial).
        for domain, value in dev_entry.identifiers:
            if domain == DOMAIN and value == new_entry.unique_id:
                _LOGGER.debug(
                    "Migrating device with %s to %s",
                    dev_entry.identifiers,
                    new_entry.unique_id,
                )
                migrated_devices.append(dev_entry.id)
                # Attach to the new entry and detach from the legacy one.
                device_registry.async_update_device(
                    dev_entry.id,
                    add_config_entry_id=new_entry.entry_id,
                    remove_config_entry_id=legacy_entry_id,
                )
    # Move all entities that live on one of the migrated devices.
    entity_registry = er.async_get(hass)
    for reg_entity in er.async_entries_for_config_entry(
        entity_registry, legacy_entry_id
    ):
        if reg_entity.device_id in migrated_devices:
            entity_registry.async_update_entity(
                reg_entity.entity_id, config_entry_id=new_entry.entry_id
            )
def async_entry_is_legacy(entry: ConfigEntry) -> bool:
    """Check if a config entry is the legacy shared one."""
    unique_id = entry.unique_id
    # Legacy entries have no unique_id or use the integration domain as one.
    return unique_id is None or unique_id == DOMAIN
def async_get_legacy_entry(hass: HomeAssistant) -> ConfigEntry | None:
    """Get the legacy config entry, or None if there is none."""
    entries = hass.config_entries.async_entries(DOMAIN)
    return next((entry for entry in entries if async_entry_is_legacy(entry)), None)
def infrared_brightness_value_to_option(value: int) -> str | None:
    """Convert infrared brightness from value to option."""
    # .get() already defaults to None for unknown values.
    return INFRARED_BRIGHTNESS_VALUES_MAP.get(value)
def infrared_brightness_option_to_value(option: str) -> int | None:
    """Convert infrared brightness option to value."""
    # Reverse lookup in the value->option map.
    for value, mapped_option in INFRARED_BRIGHTNESS_VALUES_MAP.items():
        if mapped_option == option:
            return value
    return None
def convert_8_to_16(value: int) -> int:
    """Scale an 8 bit level into 16 bits."""
    # Duplicate the byte into high and low byte (0xAB -> 0xABAB).
    high_byte = value << 8
    return high_byte | value
def convert_16_to_8(value: int) -> int:
    """Scale a 16 bit level into 8 bits."""
    # Keep only the high byte (floor division equals the arithmetic shift).
    return value // 256
def lifx_features(bulb: Light) -> dict[str, Any]:
    """Return a feature map for this bulb, or a default map if unknown."""
    features_map = products.features_map
    # Product id 1 serves as the default feature set for unknown products.
    features: dict[str, Any] = features_map.get(bulb.product) or features_map[1]
    return features
Find the desired color from a number of possible inputs.
def find_hsbk(hass: HomeAssistant, **kwargs: Any) -> list[float | int | None] | None:
    """Find the desired color from a number of possible inputs.

    Returns [Hue, Saturation, Brightness, Kelvin] (each possibly None),
    or None when no color-related parameter was supplied.
    """
    hue, saturation, brightness, kelvin = [None] * 4

    # Color name is the weakest color input; HS/RGB/XY below override it.
    if (color_name := kwargs.get(ATTR_COLOR_NAME)) is not None:
        try:
            hue, saturation = color_util.color_RGB_to_hs(
                *color_util.color_name_to_rgb(color_name)
            )
        except ValueError:
            _LOGGER.warning(
                "Got unknown color %s, falling back to neutral white", color_name
            )
            hue, saturation = (0, 0)

    if ATTR_HS_COLOR in kwargs:
        hue, saturation = kwargs[ATTR_HS_COLOR]
    elif ATTR_RGB_COLOR in kwargs:
        hue, saturation = color_util.color_RGB_to_hs(*kwargs[ATTR_RGB_COLOR])
    elif ATTR_XY_COLOR in kwargs:
        hue, saturation = color_util.color_xy_to_hs(*kwargs[ATTR_XY_COLOR])

    if hue is not None:
        assert saturation is not None
        # Scale HA hue (0..360) and saturation (0..100) to 16-bit values.
        hue = int(hue / 360 * 65535)
        saturation = int(saturation / 100 * 65535)
        # A color request implies a neutral white color temperature.
        kelvin = 3500

    # Temperature inputs zero the saturation; later branches win over earlier.
    if ATTR_KELVIN in kwargs:
        _LOGGER.warning(
            "The 'kelvin' parameter is deprecated. Please use 'color_temp_kelvin' for"
            " all service calls"
        )
        kelvin = kwargs.pop(ATTR_KELVIN)
        saturation = 0

    if ATTR_COLOR_TEMP in kwargs:
        kelvin = color_util.color_temperature_mired_to_kelvin(
            kwargs.pop(ATTR_COLOR_TEMP)
        )
        saturation = 0

    if ATTR_COLOR_TEMP_KELVIN in kwargs:
        kelvin = kwargs.pop(ATTR_COLOR_TEMP_KELVIN)
        saturation = 0

    if ATTR_BRIGHTNESS in kwargs:
        brightness = convert_8_to_16(kwargs[ATTR_BRIGHTNESS])

    if ATTR_BRIGHTNESS_PCT in kwargs:
        brightness = convert_8_to_16(round(255 * kwargs[ATTR_BRIGHTNESS_PCT] / 100))

    hsbk = [hue, saturation, brightness, kelvin]
    return None if hsbk == [None] * 4 else hsbk
Copy change on top of base, except when None.
Hue, Saturation, Brightness, Kelvin | def merge_hsbk(
base: list[float | int | None], change: list[float | int | None]
) -> list[float | int | None]:
"""Copy change on top of base, except when None.
Hue, Saturation, Brightness, Kelvin
"""
return [b if c is None else c for b, c in zip(base, change, strict=False)] |
def _off_by_one_mac(firmware: str) -> bool:
    """Check if the firmware version has the off by one mac."""
    if not firmware:
        return False
    return bool(AwesomeVersion(firmware) >= FIX_MAC_FW)
def get_real_mac_addr(mac_addr: str, firmware: str) -> str:
    """Increment the last byte of the mac address by one for FW>3.70."""
    if _off_by_one_mac(firmware):
        return _get_mac_offset(mac_addr, 1)
    return mac_addr
def formatted_serial(serial_number: str) -> str:
    """Format the serial number to match the HA device registry."""
    # Serial numbers share the MAC-address format, so reuse the registry's
    # MAC formatter.
    return dr.format_mac(serial_number)
def mac_matches_serial_number(mac_addr: str, serial_number: str) -> bool:
    """Check if a mac address matches the serial number."""
    formatted_mac = dr.format_mac(mac_addr)
    # Direct match, or match after the off-by-one MAC adjustment.
    if formatted_serial(serial_number) == formatted_mac:
        return True
    return _get_mac_offset(serial_number, 1) == formatted_mac
def async_condition_from_config(
    hass: HomeAssistant, config: ConfigType
) -> ConditionCheckerType:
    """Evaluate state based on configuration."""
    # Delegates entirely to the shared toggle_entity condition helper.
    return toggle_entity.async_condition_from_config(hass, config)
def _color_mode_same(cur_state: State, state: State) -> bool:
    """Test if color_mode is same."""
    saved_mode = state.attributes.get(ATTR_COLOR_MODE, ColorMode.UNKNOWN)
    # Guard for scenes etc. which were created before color modes existed.
    if saved_mode == ColorMode.UNKNOWN:
        return True
    current_mode = cur_state.attributes.get(ATTR_COLOR_MODE, ColorMode.UNKNOWN)
    return cast(bool, current_mode == saved_mode)
def check_attr_equal(attr1: Mapping, attr2: Mapping, attr_str: str) -> bool:
    """Return true if the given attribute is equal in both mappings (missing counts as None)."""
    first = attr1.get(attr_str)
    second = attr2.get(attr_str)
    return first == second
def async_check_significant_change(
    hass: HomeAssistant,
    old_state: str,
    old_attrs: dict,
    new_state: str,
    new_attrs: dict,
    **kwargs: Any,
) -> bool | None:
    """Test if state significantly changed."""
    # Any on/off change or effect change is always significant.
    if old_state != new_state:
        return True
    if old_attrs.get(ATTR_EFFECT) != new_attrs.get(ATTR_EFFECT):
        return True
    previous_color = old_attrs.get(ATTR_HS_COLOR)
    current_color = new_attrs.get(ATTR_HS_COLOR)
    if previous_color and current_color:
        # Hue range 0..360 (threshold 5), saturation range 0..100 (threshold 3).
        if check_absolute_change(previous_color[0], current_color[0], 5):
            return True
        if check_absolute_change(previous_color[1], current_color[1], 3):
            return True
    # Brightness range 0..255.
    if check_absolute_change(
        old_attrs.get(ATTR_BRIGHTNESS), new_attrs.get(ATTR_BRIGHTNESS), 3
    ):
        return True
    # Color temperature, default range 153..500 mireds.
    if check_absolute_change(
        old_attrs.get(ATTR_COLOR_TEMP), new_attrs.get(ATTR_COLOR_TEMP), 5
    ):
        return True
    return False
def filter_supported_color_modes(color_modes: Iterable[ColorMode]) -> set[ColorMode]:
    """Filter the given color modes.

    Raises HomeAssistantError for an empty/unknown/inconsistent set.
    """
    modes = set(color_modes)
    invalid = (
        not modes
        or ColorMode.UNKNOWN in modes
        or (ColorMode.WHITE in modes and not color_supported(modes))
    )
    if invalid:
        raise HomeAssistantError
    # ONOFF and BRIGHTNESS are implied by any richer mode; drop them when
    # other modes are present (ONOFF first, then BRIGHTNESS).
    if len(modes) > 1:
        modes.discard(ColorMode.ONOFF)
    if len(modes) > 1:
        modes.discard(ColorMode.BRIGHTNESS)
    return modes
def valid_supported_color_modes(
    color_modes: Iterable[ColorMode | str],
) -> set[ColorMode | str]:
    """Validate the given color modes.

    Raises vol.Error when the set is empty, contains UNKNOWN, mixes
    BRIGHTNESS/ONOFF with other modes, or has WHITE without color support.
    """
    modes = set(color_modes)
    valid = bool(modes)
    if ColorMode.UNKNOWN in modes:
        valid = False
    if len(modes) > 1 and (ColorMode.BRIGHTNESS in modes or ColorMode.ONOFF in modes):
        valid = False
    if ColorMode.WHITE in modes and not color_supported(modes):
        valid = False
    if not valid:
        raise vol.Error(f"Invalid supported_color_modes {sorted(modes)}")
    return modes
def brightness_supported(color_modes: Iterable[ColorMode | str] | None) -> bool:
    """Test if brightness is supported."""
    # Falsy (None/empty) input supports nothing.
    return bool(color_modes) and not COLOR_MODES_BRIGHTNESS.isdisjoint(color_modes)
def color_supported(color_modes: Iterable[ColorMode | str] | None) -> bool:
    """Test if color is supported."""
    # Falsy (None/empty) input supports nothing.
    return bool(color_modes) and not COLOR_MODES_COLOR.isdisjoint(color_modes)
def color_temp_supported(color_modes: Iterable[ColorMode | str] | None) -> bool:
    """Test if color temperature is supported."""
    # Falsy (None/empty) input supports nothing.
    return bool(color_modes) and ColorMode.COLOR_TEMP in color_modes
Get supported color modes for a light entity.
First try the statemachine, then entity registry.
def get_supported_color_modes(hass: HomeAssistant, entity_id: str) -> set[str] | None:
    """Get supported color modes for a light entity.

    First try the statemachine, then entity registry.
    This is the equivalent of entity helper get_supported_features.
    """
    if state := hass.states.get(entity_id):
        return state.attributes.get(ATTR_SUPPORTED_COLOR_MODES)
    entry = er.async_get(hass).async_get(entity_id)
    if entry is None:
        raise HomeAssistantError(f"Unknown entity {entity_id}")
    if not entry.capabilities:
        return None
    return entry.capabilities.get(ATTR_SUPPORTED_COLOR_MODES)
def is_on(hass: HomeAssistant, entity_id: str) -> bool:
    """Return if the lights are on based on the statemachine."""
    # Pure state-machine lookup; does not query the device.
    return hass.states.is_state(entity_id, STATE_ON)
def preprocess_turn_on_alternatives(
    hass: HomeAssistant, params: dict[str, Any]
) -> None:
    """Process extra data for turn light on request.

    Mutates ``params`` in place, normalizing alternative color/brightness
    inputs into canonical attributes.

    Async friendly.
    """
    # Bail out, we process this later.
    if ATTR_BRIGHTNESS_STEP in params or ATTR_BRIGHTNESS_STEP_PCT in params:
        return
    if ATTR_PROFILE in params:
        # Profiles may fill in brightness/color defaults before the
        # alternatives below are resolved.
        hass.data[DATA_PROFILES].apply_profile(params.pop(ATTR_PROFILE), params)
    if (color_name := params.pop(ATTR_COLOR_NAME, None)) is not None:
        try:
            params[ATTR_RGB_COLOR] = color_util.color_name_to_rgb(color_name)
        except ValueError:
            # Unknown color names degrade to white rather than failing the call.
            _LOGGER.warning("Got unknown color %s, falling back to white", color_name)
            params[ATTR_RGB_COLOR] = (255, 255, 255)
    # Each of the three branches below sets BOTH mireds and kelvin so
    # downstream consumers can use either representation. Later pops win:
    # an explicit COLOR_TEMP_KELVIN overrides ATTR_KELVIN / ATTR_COLOR_TEMP.
    if (mired := params.pop(ATTR_COLOR_TEMP, None)) is not None:
        kelvin = color_util.color_temperature_mired_to_kelvin(mired)
        params[ATTR_COLOR_TEMP] = int(mired)
        params[ATTR_COLOR_TEMP_KELVIN] = int(kelvin)
    if (kelvin := params.pop(ATTR_KELVIN, None)) is not None:
        mired = color_util.color_temperature_kelvin_to_mired(kelvin)
        params[ATTR_COLOR_TEMP] = int(mired)
        params[ATTR_COLOR_TEMP_KELVIN] = int(kelvin)
    if (kelvin := params.pop(ATTR_COLOR_TEMP_KELVIN, None)) is not None:
        mired = color_util.color_temperature_kelvin_to_mired(kelvin)
        params[ATTR_COLOR_TEMP] = int(mired)
        params[ATTR_COLOR_TEMP_KELVIN] = int(kelvin)
    brightness_pct = params.pop(ATTR_BRIGHTNESS_PCT, None)
    if brightness_pct is not None:
        # Scale 0-100% to the 0-255 brightness range.
        params[ATTR_BRIGHTNESS] = round(255 * brightness_pct / 100)
def filter_turn_off_params(
    light: LightEntity, params: dict[str, Any]
) -> dict[str, Any]:
    """Filter out params not used in turn off or not supported by the light."""
    if not params:
        return params

    features = light.supported_features_compat
    # Drop optional attributes the light does not support.
    for feature, attribute in (
        (LightEntityFeature.FLASH, ATTR_FLASH),
        (LightEntityFeature.TRANSITION, ATTR_TRANSITION),
    ):
        if feature not in features:
            params.pop(attribute, None)

    # Only transition and flash are meaningful for turn_off.
    return {
        key: value
        for key, value in params.items()
        if key in (ATTR_TRANSITION, ATTR_FLASH)
    }
def filter_turn_on_params(light: LightEntity, params: dict[str, Any]) -> dict[str, Any]:
    """Filter out params not supported by the light."""
    features = light.supported_features_compat
    if LightEntityFeature.EFFECT not in features:
        params.pop(ATTR_EFFECT, None)
    if LightEntityFeature.FLASH not in features:
        params.pop(ATTR_FLASH, None)
    if LightEntityFeature.TRANSITION not in features:
        params.pop(ATTR_TRANSITION, None)

    supported_color_modes = (
        light._light_internal_supported_color_modes  # pylint:disable=protected-access
    )
    if not brightness_supported(supported_color_modes):
        params.pop(ATTR_BRIGHTNESS, None)

    # Drop every color attribute whose color mode is not supported.
    mode_to_attrs: dict[ColorMode, tuple[str, ...]] = {
        ColorMode.COLOR_TEMP: (ATTR_COLOR_TEMP, ATTR_COLOR_TEMP_KELVIN),
        ColorMode.HS: (ATTR_HS_COLOR,),
        ColorMode.RGB: (ATTR_RGB_COLOR,),
        ColorMode.RGBW: (ATTR_RGBW_COLOR,),
        ColorMode.RGBWW: (ATTR_RGBWW_COLOR,),
        ColorMode.WHITE: (ATTR_WHITE,),
        ColorMode.XY: (ATTR_XY_COLOR,),
    }
    for mode, attributes in mode_to_attrs.items():
        if mode not in supported_color_modes:
            for attribute in attributes:
                params.pop(attribute, None)
    return params
def _coerce_none(value: str) -> None:
    """Coerce an empty string as None."""
    if isinstance(value, str):
        if value == "":
            return
        raise vol.Invalid("Not an empty string")
    raise vol.Invalid("Expected a string")
def rewrite_legacy(config: ConfigType) -> ConfigType:
    """Rewrite legacy configuration to new format."""
    # A config without explicit bridges is treated as a single bridge.
    bridges = config.get(CONF_BRIDGES, [config])
    new_bridges = []
    for bridge_conf in bridges:
        if "groups" in bridge_conf:
            groups = bridge_conf["groups"]
        else:
            _LOGGER.warning("Legacy configuration format detected")
            # Legacy format: flat group_<n>_name / group_<n>_type keys.
            groups = [
                {
                    "number": index,
                    "type": bridge_conf.get(f"group_{index}_type", DEFAULT_LED_TYPE),
                    "name": bridge_conf.get(f"group_{index}_name"),
                }
                for index in range(1, 5)
                if f"group_{index}_name" in bridge_conf
            ]
        new_bridges.append(
            {
                "host": bridge_conf.get(CONF_HOST),
                "version": bridge_conf.get(CONF_VERSION),
                "port": bridge_conf.get(CONF_PORT),
                "groups": groups,
            }
        )
    return {"bridges": new_bridges}
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the LimitlessLED lights."""
    # Two legacy configuration formats are supported to maintain backwards
    # compatibility; normalize them first.
    config = rewrite_legacy(config)

    entities = []
    bridge_conf: dict[str, Any]
    group_conf: dict[str, Any]
    for bridge_conf in config[CONF_BRIDGES]:
        bridge = Bridge(
            bridge_conf.get(CONF_HOST),
            port=bridge_conf.get(CONF_PORT, DEFAULT_PORT),
            version=bridge_conf.get(CONF_VERSION, DEFAULT_VERSION),
        )
        for group_conf in bridge_conf[CONF_GROUPS]:
            group = bridge.add_group(
                group_conf.get(CONF_NUMBER),
                group_conf.get(CONF_NAME),
                group_conf.get(CONF_TYPE, DEFAULT_LED_TYPE),
            )
            entities.append(LimitlessLEDGroup(group, {"fade": group_conf[CONF_FADE]}))
    add_entities(entities)
def state(
    new_state: bool,
) -> Callable[
    [Callable[Concatenate[_LimitlessLEDGroupT, int, Pipeline, _P], Any]],
    Callable[Concatenate[_LimitlessLEDGroupT, _P], None],
]:
    """State decorator.

    Specify True (turn on) or False (turn off).

    The wrapped method receives (self, transition_time, pipeline, *args) and
    appends group-type-specific commands to the pipeline; the wrapper then
    enqueues the pipeline and records the new on/off state.
    """

    def decorator(
        function: Callable[Concatenate[_LimitlessLEDGroupT, int, Pipeline, _P], Any],
    ) -> Callable[Concatenate[_LimitlessLEDGroupT, _P], None]:
        """Set up the decorator function."""

        def wrapper(
            self: _LimitlessLEDGroupT, *args: _P.args, **kwargs: _P.kwargs
        ) -> None:
            """Wrap a group state change."""
            pipeline = Pipeline()
            transition_time = DEFAULT_TRANSITION
            # A running color loop must be stopped before any state change.
            if self.effect == EFFECT_COLORLOOP:
                self.group.stop()
                self._attr_effect = None  # pylint: disable=protected-access
            # Set transition time.
            if ATTR_TRANSITION in kwargs:
                transition_time = int(cast(float, kwargs[ATTR_TRANSITION]))
            # Do group type-specific work.
            function(self, transition_time, pipeline, *args, **kwargs)
            # Update state.
            self._attr_is_on = new_state  # pylint: disable=protected-access
            self.group.enqueue(pipeline)
            self.schedule_update_ha_state()

        return wrapper

    return decorator
def get_scanner(
    hass: HomeAssistant, config: ConfigType
) -> LinksysSmartWifiDeviceScanner | None:
    """Validate the configuration and return a Linksys AP scanner."""
    try:
        scanner = LinksysSmartWifiDeviceScanner(config[DOMAIN])
    except ConnectionError:
        # Unreachable AP: report no scanner instead of raising.
        return None
    return scanner
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Linode droplet sensor."""
    linode = hass.data[DATA_LINODE]
    entities = []
    for node in config[CONF_NODES]:
        node_id = linode.get_node_id(node)
        if node_id is None:
            # Abort setup entirely if any configured node is unknown.
            _LOGGER.error("Node %s is not available", node)
            return
        entities.append(LinodeBinarySensor(linode, node_id))
    add_entities(entities, True)
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Linode Node switch."""
    linode = hass.data[DATA_LINODE]
    entities = []
    for node in config[CONF_NODES]:
        node_id = linode.get_node_id(node)
        if node_id is None:
            # Abort setup entirely if any configured node is unknown.
            _LOGGER.error("Node %s is not available", node)
            return
        entities.append(LinodeSwitch(linode, node_id))
    add_entities(entities, True)
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the Linode component."""
    domain_conf = config[DOMAIN]
    client = Linode(domain_conf.get(CONF_ACCESS_TOKEN))
    try:
        # Fetching the profile doubles as a credentials check.
        _LOGGER.info("Linode Profile %s", client.manager.get_profile().username)
    except linode.errors.ApiError as err:
        _LOGGER.error(err)
        return False
    hass.data[DATA_LINODE] = client
    return True
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Linux Battery sensor."""
    name = config.get(CONF_NAME)
    battery_id = config.get(CONF_BATTERY)
    system = config.get(CONF_SYSTEM)

    # Android exposes a single "battery" directory; Linux uses BAT<n>.
    subdir = "battery" if system == "android" else f"BAT{battery_id}"
    try:
        os.listdir(os.path.join(DEFAULT_PATH, subdir))
    except FileNotFoundError:
        _LOGGER.error("No battery found")
        return

    add_entities([LinuxBatterySensor(name, battery_id, system)], True)
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the LIRC capability."""
    # blocking=True gives unexpected behavior (multiple responses for 1 press)
    # also by not blocking, we allow hass to shut down the thread gracefully
    # on exit.
    lirc.init("home-assistant", blocking=False)
    lirc_interface = LircInterface(hass)

    hass.bus.listen_once(
        EVENT_HOMEASSISTANT_START, lambda _event: lirc_interface.start()
    )
    hass.bus.listen_once(
        EVENT_HOMEASSISTANT_STOP, lambda _event: lirc_interface.stopped.set()
    )
    return True
Return a gauge icon valid identifier. | def icon_for_gauge_level(gauge_level: int | None = None, offset: int = 0) -> str:
"""Return a gauge icon valid identifier."""
if gauge_level is None or gauge_level <= 0 + offset:
return "mdi:gauge-empty"
if gauge_level > 70 + offset:
return "mdi:gauge-full"
if gauge_level > 30 + offset:
return "mdi:gauge"
return "mdi:gauge-low" |
def _as_local_time(start: datetime | None) -> time | None:
    """Return a datetime as local time."""
    if start is None:
        return None
    return dt_util.as_local(start).time()
def get_platforms_for_robots(robots: list[Robot]) -> set[Platform]:
    """Get platforms for robots."""
    platforms: set[Platform] = set()
    for robot in robots:
        for robot_type, type_platforms in PLATFORMS_BY_TYPE.items():
            if isinstance(robot, robot_type):
                platforms.update(type_platforms)
    return platforms
def get_service(
    hass: HomeAssistant,
    config: ConfigType,
    discovery_info: DiscoveryInfoType | None = None,
) -> AutomateNotificationService:
    """Get the LlamaLab Automate notification service."""
    return AutomateNotificationService(
        config.get(CONF_API_KEY),
        config.get(CONF_TO),
        config.get(CONF_DEVICE),
    )
def _parse_event(event: dict[str, Any]) -> Event:
    """Parse an ical event from a home assistant event dictionary.

    Raises vol.Invalid when the fields cannot be parsed into an Event.
    """
    if rrule := event.get(EVENT_RRULE):
        event[EVENT_RRULE] = Recur.from_rrule(rrule)
    # This function is called with new events created in the local timezone,
    # however ical library does not properly return recurrence_ids for
    # start dates with a timezone. For now, ensure any datetime is stored as a
    # floating local time to ensure we still apply proper local timezone rules.
    # This can be removed when ical is updated with a new recurrence_id format
    # https://github.com/home-assistant/core/issues/87759
    for key in (EVENT_START, EVENT_END):
        if (
            (value := event[key])
            and isinstance(value, datetime)
            and value.tzinfo is not None
        ):
            event[key] = dt_util.as_local(value).replace(tzinfo=None)
    try:
        return Event(**event)
    except CalendarParseError as err:
        # Surface parse failures as voluptuous errors for the websocket API.
        _LOGGER.debug("Error parsing event input fields: %s (%s)", event, str(err))
        raise vol.Invalid("Error parsing event input fields") from err
def _get_calendar_event(event: Event) -> CalendarEvent:
    """Return a CalendarEvent from an API event."""
    start: datetime | date
    end: datetime | date
    if isinstance(event.start, datetime) and isinstance(event.end, datetime):
        start = dt_util.as_local(event.start)
        end = dt_util.as_local(event.end)
        # Zero or negative duration: force a minimum 30 minute event.
        if end <= start:
            end = start + timedelta(minutes=30)
    else:
        start = event.start
        end = event.end
        # All-day events with a negative span get stretched to one day.
        if end < start:
            end = start + timedelta(days=1)
    return CalendarEvent(
        summary=event.summary,
        start=start,
        end=end,
        description=event.description,
        uid=event.uid,
        rrule=event.rrule.as_rrule_str() if event.rrule else None,
        recurrence_id=event.recurrence_id,
        location=event.location,
    )
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Camera that works with local files."""
    # All LocalFile cameras are tracked in hass.data so the shared
    # update_file_path service can find them by entity_id.
    if DATA_LOCAL_FILE not in hass.data:
        hass.data[DATA_LOCAL_FILE] = []
    file_path = config[CONF_FILE_PATH]
    camera = LocalFile(config[CONF_NAME], file_path)
    hass.data[DATA_LOCAL_FILE].append(camera)

    def update_file_path_service(call: ServiceCall) -> None:
        """Update the file path."""
        file_path = call.data[CONF_FILE_PATH]
        entity_ids = call.data[ATTR_ENTITY_ID]
        cameras = hass.data[DATA_LOCAL_FILE]
        for camera in cameras:
            if camera.entity_id in entity_ids:
                camera.update_file_path(file_path)

    # NOTE(review): the service is (re-)registered on every platform entry;
    # registration appears idempotent per domain/service name — confirm.
    hass.services.register(
        DOMAIN,
        SERVICE_UPDATE_FILE_PATH,
        update_file_path_service,
        schema=CAMERA_SERVICE_UPDATE_FILE_PATH,
    )
    add_entities([camera])
def _migrate_calendar(calendar: Calendar) -> bool:
    """Upgrade due dates to rfc5545 format.

    In rfc5545 due dates are exclusive, however we previously set the due date
    as inclusive based on what the user set in the UI. A task is considered
    overdue at midnight at the start of a date so we need to shift the due date
    to the next day for old calendar versions.

    Return True when any todo was modified.
    """
    # Only calendars produced by the old prodid need migration
    # (None also fails the equality check).
    if calendar.prodid != PRODID_REQUIRES_MIGRATION:
        return False
    # Date-only due values are shifted; datetimes were already exclusive.
    needs_shift = [
        todo
        for todo in calendar.todos
        if todo.due is not None and not isinstance(todo.due, datetime.datetime)
    ]
    for todo in needs_shift:
        todo.due += datetime.timedelta(days=1)
    return bool(needs_shift)
def _convert_item(item: TodoItem) -> Todo:
    """Convert a HomeAssistant TodoItem to an ical Todo."""
    todo = Todo()
    if item.uid:
        todo.uid = item.uid
    if item.summary:
        todo.summary = item.summary
    if item.status:
        todo.status = ICS_TODO_STATUS_MAP_INV[item.status]
    due = item.due
    # rfc5545 due dates are exclusive, so date-only deadlines move one day out.
    if due is not None and not isinstance(due, datetime.datetime):
        due += datetime.timedelta(days=1)
    todo.due = due
    todo.description = item.description
    return todo
def _id(value: str) -> str:
    """Coerce id by removing '-'."""
    return "".join(value.split("-"))
def _validate_test_mode(obj: dict) -> dict:
    """Validate that id is provided outside of test mode."""
    # Check ATTR_ID first: the trigger key is only consulted when id is absent.
    if ATTR_ID in obj:
        return obj
    if obj[ATTR_TRIGGER] == "test":
        return obj
    raise vol.Invalid("Location id not specified")
def async_condition_from_config(
    hass: HomeAssistant, config: ConfigType
) -> condition.ConditionCheckerType:
    """Create a function to test a device condition."""
    # Map condition type to the lock state it checks; anything else
    # (i.e. "is_unlocked") falls back to STATE_UNLOCKED.
    state_for_type = {
        "is_jammed": STATE_JAMMED,
        "is_locking": STATE_LOCKING,
        "is_unlocking": STATE_UNLOCKING,
        "is_locked": STATE_LOCKED,
    }
    state = state_for_type.get(config[CONF_TYPE], STATE_UNLOCKED)
    registry = er.async_get(hass)
    entity_id = er.async_resolve_entity_id(registry, config[ATTR_ENTITY_ID])

    def test_is_state(hass: HomeAssistant, variables: TemplateVarsType) -> bool:
        """Test if an entity is a certain state."""
        return condition.state(hass, entity_id, state)

    return test_is_state
def async_describe_on_off_states(
    hass: HomeAssistant, registry: "GroupIntegrationRegistry"
) -> None:
    """Describe group on off states.

    Registers the lock domain with the group integration: unlocked counts
    as the "on" state, locked as the "off" state.
    """
    registry.on_off_states(DOMAIN, {STATE_UNLOCKED}, STATE_UNLOCKED, STATE_LOCKED)
def async_check_significant_change(
    hass: HomeAssistant,
    old_state: str,
    old_attrs: dict,
    new_state: str,
    new_attrs: dict,
    **kwargs: Any,
) -> bool | None:
    """Test if state significantly changed.

    Only the state string matters; attribute changes are not significant.
    """
    return old_state != new_state
def async_filter_entities(hass: HomeAssistant, entity_ids: list[str]) -> list[str]:
    """Filter out any entities that logbook will not produce results for."""
    ent_reg = er.async_get(hass)
    keep: list[str] = []
    for entity_id in entity_ids:
        # Continuous domains and continuous sensors never appear in logbook.
        if split_entity_id(entity_id)[0] in ALWAYS_CONTINUOUS_DOMAINS:
            continue
        if is_sensor_continuous(hass, ent_reg, entity_id):
            continue
        keep.append(entity_id)
    return keep
def _async_config_entries_for_ids(
    hass: HomeAssistant, entity_ids: list[str] | None, device_ids: list[str] | None
) -> set[str]:
    """Find the config entry ids for a set of entities or devices."""
    config_entry_ids: set[str] = set()
    if entity_ids:
        ent_reg = er.async_get(hass)
        for entity_id in entity_ids:
            entry = ent_reg.async_get(entity_id)
            if entry is not None and entry.config_entry_id:
                config_entry_ids.add(entry.config_entry_id)
    if device_ids:
        dev_reg = dr.async_get(hass)
        for device_id in device_ids:
            device = dev_reg.async_get(device_id)
            if device is not None and device.config_entries:
                config_entry_ids.update(device.config_entries)
    return config_entry_ids
def async_determine_event_types(
    hass: HomeAssistant, entity_ids: list[str] | None, device_ids: list[str] | None
) -> tuple[EventType[Any] | str, ...]:
    """Reduce the event types based on the entity ids and device ids."""
    logbook_config: LogbookConfig = hass.data[DOMAIN]
    external_events = logbook_config.external_events
    if not entity_ids and not device_ids:
        # No filter at all: every built-in and external event type.
        return (*BUILT_IN_EVENTS, *external_events)

    interested_domains: set[str] = set()
    for entry_id in _async_config_entries_for_ids(hass, entity_ids, device_ids):
        if entry := hass.config_entries.async_get_entry(entry_id):
            interested_domains.add(entry.domain)

    # Automations and scripts can refer to entities or devices but they do
    # not have a config entry, so they are always included — this matches the
    # historical behavior of matching only on entities.
    interested_event_types: set[EventType[Any] | str] = {
        external_event
        for external_event, domain_call in external_events.items()
        if domain_call[0] in interested_domains
    } | AUTOMATION_EVENTS
    if entity_ids:
        # We also allow entity_ids to be recorded via manual logbook entries.
        interested_event_types.add(EVENT_LOGBOOK_ENTRY)
    return tuple(interested_event_types)
def extract_attr(source: Mapping[str, Any], attr: str) -> list[str]:
    """Extract an attribute as a list or string."""
    value = source.get(attr)
    if value is None:
        return []
    if isinstance(value, list):
        return value
    # Scalars are coerced to a comma-separated string and split.
    return str(value).split(",")
def event_forwarder_filtered(
    target: Callable[[Event], None],
    entities_filter: Callable[[str], bool] | None,
    entity_ids: list[str] | None,
    device_ids: list[str] | None,
) -> Callable[[Event], None]:
    """Make a callable to filter events.

    Returns either the target unchanged (no filtering), a wrapper applying
    the entities_filter, or a wrapper matching explicit entity/device ids.
    """
    if not entities_filter and not entity_ids and not device_ids:
        # No filter
        # - Script Trace (context ids)
        # - Automation Trace (context ids)
        return target

    if entities_filter:
        # We have an entity filter:
        # - Logbook panel
        @callback
        def _forward_events_filtered_by_entities_filter(event: Event) -> None:
            assert entities_filter is not None
            event_data = event.data
            entity_ids = extract_attr(event_data, ATTR_ENTITY_ID)
            # Drop events whose entity ids are all rejected by the filter.
            if entity_ids and not any(
                entities_filter(entity_id) for entity_id in entity_ids
            ):
                return
            # Domain-only events are checked with a synthetic "<domain>._" id.
            domain = event_data.get(ATTR_DOMAIN)
            if domain and not entities_filter(f"{domain}._"):
                return
            target(event)

        return _forward_events_filtered_by_entities_filter

    # We are filtering on entity_ids and/or device_ids:
    # - Areas
    # - Devices
    # - Logbook Card
    entity_ids_set = set(entity_ids) if entity_ids else set()
    device_ids_set = set(device_ids) if device_ids else set()

    @callback
    def _forward_events_filtered_by_device_entity_ids(event: Event) -> None:
        event_data = event.data
        if entity_ids_set.intersection(
            extract_attr(event_data, ATTR_ENTITY_ID)
        ) or device_ids_set.intersection(extract_attr(event_data, ATTR_DEVICE_ID)):
            target(event)

    return _forward_events_filtered_by_device_entity_ids
def async_subscribe_events(
    hass: HomeAssistant,
    subscriptions: list[CALLBACK_TYPE],
    target: Callable[[Event[Any]], None],
    event_types: tuple[EventType[Any] | str, ...],
    entities_filter: Callable[[str], bool] | None,
    entity_ids: list[str] | None,
    device_ids: list[str] | None,
) -> None:
    """Subscribe to events for the entities and devices or all.

    These are the events we need to listen for to do
    the live logbook stream.

    Unsubscribe callbacks are appended to ``subscriptions``.
    """
    assert is_callback(target), "target must be a callback"
    event_forwarder = event_forwarder_filtered(
        target, entities_filter, entity_ids, device_ids
    )
    subscriptions.extend(
        hass.bus.async_listen(event_type, event_forwarder) for event_type in event_types
    )

    if device_ids and not entity_ids:
        # No entities to subscribe to but we are filtering
        # on device ids so we do not want to get any state
        # changed events
        return

    @callback
    def _forward_state_events_filtered(event: Event[EventStateChangedData]) -> None:
        # Logbook never shows states that are missing old_state or new_state.
        if (old_state := event.data["old_state"]) is None or (
            new_state := event.data["new_state"]
        ) is None:
            return
        if _is_state_filtered(new_state, old_state) or (
            entities_filter and not entities_filter(new_state.entity_id)
        ):
            return
        target(event)

    if entity_ids:
        # Targeted subscription: only the requested entities.
        subscriptions.append(
            async_track_state_change_event(
                hass, entity_ids, _forward_state_events_filtered
            )
        )
        return

    # We want the firehose
    subscriptions.append(
        hass.bus.async_listen(
            EVENT_STATE_CHANGED,
            _forward_state_events_filtered,
        )
    )
def is_sensor_continuous(
    hass: HomeAssistant, ent_reg: er.EntityRegistry, entity_id: str
) -> bool:
    """Determine if a sensor is continuous.

    Sensors with a unit_of_measurement or state_class are considered continuous.

    The unit_of_measurement check will already happen if this is
    called for historical data because the SQL query generated by _get_events
    will filter out any sensors with a unit_of_measurement.

    If the state still exists in the state machine, this function still
    checks for ATTR_UNIT_OF_MEASUREMENT since the live mode is not filtered
    by the SQL query.
    """
    # Fast path: the state machine carries the attributes we need.
    state = hass.states.get(entity_id)
    if state and (attributes := state.attributes):
        return ATTR_UNIT_OF_MEASUREMENT in attributes or ATTR_STATE_CLASS in attributes
    # Fallback: consult the entity registry capabilities. Only state_class is
    # checked here — historical queries already exclude unit_of_measurement,
    # and live mode always hits the state machine above.
    entry = ent_reg.async_get(entity_id)
    return bool(
        entry and entry.capabilities and entry.capabilities.get(ATTR_STATE_CLASS)
    )
def _is_state_filtered(new_state: State, old_state: State) -> bool:
    """Check if the logbook should filter a state.

    Used when we are in live mode to ensure
    we only get significant changes (state.last_changed != state.last_updated)
    """
    if new_state.state == old_state.state:
        return True
    # Attribute-only updates are not significant for the logbook.
    if new_state.last_changed != new_state.last_updated:
        return True
    if new_state.domain in ALWAYS_CONTINUOUS_DOMAINS:
        return True
    attributes = new_state.attributes
    return ATTR_UNIT_OF_MEASUREMENT in attributes or ATTR_STATE_CLASS in attributes
def async_event_to_row(event: Event) -> EventAsRow:
    """Convert an event to a row.

    Live events are adapted to the same row shape the recorder SQL query
    produces so both paths share the processing code. hash(event) stands in
    for the database row id.
    """
    if event.event_type != EVENT_STATE_CHANGED:
        context = event.context
        return EventAsRow(
            data=event.data,
            context=event.context,
            event_type=event.event_type,
            context_id_bin=ulid_to_bytes(context.id),
            context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id),
            context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id),
            time_fired_ts=event.time_fired_timestamp,
            row_id=hash(event),
        )
    # States are prefiltered so we never get states
    # that are missing new_state or old_state
    # since the logbook does not show these
    new_state: State = event.data["new_state"]
    # Use the new state's context/time so the row reflects the change itself.
    context = new_state.context
    return EventAsRow(
        data=event.data,
        context=event.context,
        entity_id=new_state.entity_id,
        state=new_state.state,
        context_id_bin=ulid_to_bytes(context.id),
        context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id),
        context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id),
        time_fired_ts=new_state.last_updated_timestamp,
        row_id=hash(event),
        icon=new_state.attributes.get(ATTR_ICON),
    )
def _humanify(
    hass: HomeAssistant,
    rows: Generator[EventAsRow, None, None] | Sequence[Row] | Result,
    ent_reg: er.EntityRegistry,
    logbook_run: LogbookRun,
    context_augmenter: ContextAugmenter,
) -> Generator[dict[str, Any], None, None]:
    """Generate a converted list of events into entries.

    Yields one logbook entry dict per relevant row; rows for continuous
    sensors, service calls, and context-only rows are skipped.
    """
    # Continuous sensors, will be excluded from the logbook
    continuous_sensors: dict[str, bool] = {}
    # Hoist LogbookRun attributes to locals for the hot loop.
    context_lookup = logbook_run.context_lookup
    external_events = logbook_run.external_events
    event_cache = logbook_run.event_cache
    entity_name_cache = logbook_run.entity_name_cache
    include_entity_name = logbook_run.include_entity_name
    format_time = logbook_run.format_time
    memoize_new_contexts = logbook_run.memoize_new_contexts
    memoize_context = context_lookup.setdefault

    # Process rows
    for row in rows:
        context_id_bin: bytes = row.context_id_bin
        if memoize_new_contexts:
            # Remember the first row seen for each context id so later rows
            # can be augmented with "triggered by" information.
            memoize_context(context_id_bin, row)
        if row.context_only:
            continue
        event_type = row.event_type
        if event_type == EVENT_CALL_SERVICE:
            continue
        if event_type is PSEUDO_EVENT_STATE_CHANGED:
            entity_id = row.entity_id
            assert entity_id is not None
            # Skip continuous sensors (result memoized per entity_id).
            if (
                is_continuous := continuous_sensors.get(entity_id)
            ) is None and split_entity_id(entity_id)[0] == SENSOR_DOMAIN:
                is_continuous = is_sensor_continuous(hass, ent_reg, entity_id)
                continuous_sensors[entity_id] = is_continuous
            if is_continuous:
                continue
            data = {
                LOGBOOK_ENTRY_WHEN: format_time(row),
                LOGBOOK_ENTRY_STATE: row.state,
                LOGBOOK_ENTRY_ENTITY_ID: entity_id,
            }
            if include_entity_name:
                data[LOGBOOK_ENTRY_NAME] = entity_name_cache.get(entity_id)
            if icon := row.icon:
                data[LOGBOOK_ENTRY_ICON] = icon
            context_augmenter.augment(data, row, context_id_bin)
            yield data
        elif event_type in external_events:
            domain, describe_event = external_events[event_type]
            try:
                data = describe_event(event_cache.get(row))
            except Exception:  # pylint: disable=broad-except
                # A broken integration describer must not kill the stream.
                _LOGGER.exception(
                    "Error with %s describe event for %s", domain, event_type
                )
                continue
            data[LOGBOOK_ENTRY_WHEN] = format_time(row)
            data[LOGBOOK_ENTRY_DOMAIN] = domain
            context_augmenter.augment(data, row, context_id_bin)
            yield data
        elif event_type == EVENT_LOGBOOK_ENTRY:
            event = event_cache.get(row)
            if not (event_data := event.data):
                continue
            entry_domain = event_data.get(ATTR_DOMAIN)
            entry_entity_id = event_data.get(ATTR_ENTITY_ID)
            if entry_domain is None and entry_entity_id is not None:
                # Derive the domain from the entity id when not given.
                with suppress(IndexError):
                    entry_domain = split_entity_id(str(entry_entity_id))[0]
            data = {
                LOGBOOK_ENTRY_WHEN: format_time(row),
                LOGBOOK_ENTRY_NAME: event_data.get(ATTR_NAME),
                LOGBOOK_ENTRY_MESSAGE: event_data.get(ATTR_MESSAGE),
                LOGBOOK_ENTRY_DOMAIN: entry_domain,
                LOGBOOK_ENTRY_ENTITY_ID: entry_entity_id,
            }
            context_augmenter.augment(data, row, context_id_bin)
            yield data
def _rows_match(row: Row | EventAsRow, other_row: Row | EventAsRow) -> bool:
    """Check of rows match by using the same method as Events __hash__."""
    if row is other_row:
        return True
    row_id = row.row_id
    return bool(row_id and row_id == other_row.row_id)
def _row_time_fired_isoformat(row: Row | EventAsRow) -> str:
    """Convert the row timed_fired to isoformat."""
    when = dt_util.utc_from_timestamp(row.time_fired_ts) or dt_util.utcnow()
    return process_timestamp_to_utc_isoformat(when)
def _row_time_fired_timestamp(row: Row | EventAsRow) -> float:
    """Convert the row timed_fired to timestamp."""
    if fired := row.time_fired_ts:
        return fired
    # Missing timestamp: fall back to "now".
    return process_datetime_to_timestamp(dt_util.utcnow())
def async_setup(
    hass: HomeAssistant,
    conf: ConfigType,
    filters: Filters | None,
    entities_filter: Callable[[str], bool] | None,
) -> None:
    """Set up the logbook rest API."""
    view = LogbookView(conf, filters, entities_filter)
    hass.http.register_view(view)
def async_setup(hass: HomeAssistant) -> None:
    """Set up the logbook websocket API."""
    for command in (ws_get_events, ws_event_stream):
        websocket_api.async_register_command(hass, command)
def _async_send_empty_response(
    connection: ActiveConnection, msg_id: int, start_time: dt, end_time: dt | None
) -> None:
    """Send an empty response.

    The current case for this is when they ask for entity_ids
    that will all be filtered away because they have UOMs or
    state_class.
    """
    connection.send_result(msg_id)
    stream_end = end_time if end_time else dt_util.utcnow()
    stream_message = _generate_stream_message([], start_time, stream_end)
    connection.send_message(json_bytes(messages.event_message(msg_id, stream_message)))
def _generate_stream_message(
    events: list[dict[str, Any]], start_day: dt, end_day: dt
) -> dict[str, Any]:
    """Generate a logbook stream message response."""
    message: dict[str, Any] = {"events": events}
    message["start_time"] = start_day.timestamp()
    message["end_time"] = end_day.timestamp()
    return message
def _ws_stream_get_events(
    msg_id: int,
    start_day: dt,
    end_day: dt,
    formatter: Callable[[int, Any], dict[str, Any]],
    event_processor: EventProcessor,
    partial: bool,
) -> tuple[bytes, dt | None]:
    """Fetch events and convert them to json in the executor."""
    events = event_processor.get_events(start_day, end_day)
    last_time = dt_util.utc_from_timestamp(events[-1]["when"]) if events else None
    message = _generate_stream_message(events, start_day, end_day)
    if partial:
        # This is a hint to consumers of the api that we are about to send
        # another block of historical data in case the UI needs to show that
        # historical data is still loading in the future
        message["partial"] = True
    return json_bytes(formatter(msg_id, message)), last_time
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.