response
stringlengths 1
33.1k
| instruction
stringlengths 22
582k
|
---|---|
Check if the node matches the ISY Z-Wave Category for any platforms.
This is for (presumably) every version of the ISY firmware, but only
works for Z-Wave Devices with the devtype.cat property. | def _check_for_zwave_cat(
isy_data: IsyData, node: Group | Node, single_platform: Platform | None = None
) -> bool:
"""Check if the node matches the ISY Z-Wave Category for any platforms.
This is for (presumably) every version of the ISY firmware, but only
works for Z-Wave Devices with the devtype.cat property.
"""
if node.protocol != PROTO_ZWAVE:
return False
if not hasattr(node, "zwave_props") or node.zwave_props is None:
# Node doesn't have a device type category (non-Z-Wave device)
return False
device_type = node.zwave_props.category
platforms = NODE_PLATFORMS if not single_platform else [single_platform]
for platform in platforms:
if any(
device_type.startswith(t)
for t in set(NODE_FILTERS[platform][FILTER_ZWAVE_CAT])
):
isy_data.nodes[platform].append(node)
return True
return False |
Check if a node's uom matches any of the platforms uom filter.
This is used for versions of the ISY firmware that report uoms as a single
ID. We can often infer what type of device it is by that ID. | def _check_for_uom_id(
isy_data: IsyData,
node: Group | Node,
single_platform: Platform | None = None,
uom_list: list[str] | None = None,
) -> bool:
"""Check if a node's uom matches any of the platforms uom filter.
This is used for versions of the ISY firmware that report uoms as a single
ID. We can often infer what type of device it is by that ID.
"""
if not hasattr(node, "uom") or node.uom in (None, ""):
# Node doesn't have a uom (Scenes for example)
return False
# Backwards compatibility for ISYv4 Firmware:
node_uom = node.uom
if isinstance(node.uom, list):
node_uom = node.uom[0]
if uom_list and single_platform:
if node_uom in uom_list:
isy_data.nodes[single_platform].append(node)
return True
return False
platforms = NODE_PLATFORMS if not single_platform else [single_platform]
for platform in platforms:
if node_uom in NODE_FILTERS[platform][FILTER_UOM]:
isy_data.nodes[platform].append(node)
return True
return False |
Check if a list of uoms matches two possible filters.
This is for versions of the ISY firmware that report uoms as a list of all
possible "human readable" states. This filter passes if all of the possible
states fit inside the given filter. | def _check_for_states_in_uom(
isy_data: IsyData,
node: Group | Node,
single_platform: Platform | None = None,
states_list: list[str] | None = None,
) -> bool:
"""Check if a list of uoms matches two possible filters.
This is for versions of the ISY firmware that report uoms as a list of all
possible "human readable" states. This filter passes if all of the possible
states fit inside the given filter.
"""
if not hasattr(node, "uom") or node.uom in (None, ""):
# Node doesn't have a uom (Scenes for example)
return False
# This only works for ISYv4 Firmware where uom is a list of states:
if not isinstance(node.uom, list):
return False
node_uom = set(map(str.lower, node.uom))
if states_list and single_platform:
if node_uom == set(states_list):
isy_data.nodes[single_platform].append(node)
return True
return False
platforms = NODE_PLATFORMS if not single_platform else [single_platform]
for platform in platforms:
if node_uom == set(NODE_FILTERS[platform][FILTER_STATES]):
isy_data.nodes[platform].append(node)
return True
return False |
Determine if the given sensor node should be a binary_sensor. | def _is_sensor_a_binary_sensor(isy_data: IsyData, node: Group | Node) -> bool:
"""Determine if the given sensor node should be a binary_sensor."""
if _check_for_node_def(isy_data, node, single_platform=Platform.BINARY_SENSOR):
return True
if _check_for_insteon_type(isy_data, node, single_platform=Platform.BINARY_SENSOR):
return True
# For the next two checks, we're providing our own set of uoms that
# represent on/off devices. This is because we can only depend on these
# checks in the context of already knowing that this is definitely a
# sensor device.
if _check_for_uom_id(
isy_data,
node,
single_platform=Platform.BINARY_SENSOR,
uom_list=BINARY_SENSOR_UOMS,
):
return True
if _check_for_states_in_uom(
isy_data,
node,
single_platform=Platform.BINARY_SENSOR,
states_list=BINARY_SENSOR_ISY_STATES,
):
return True
return False |
Check if a node supports setting a backlight and add entity. | def _add_backlight_if_supported(isy_data: IsyData, node: Node) -> None:
"""Check if a node supports setting a backlight and add entity."""
if not getattr(node, "is_backlight_supported", False):
return
if BACKLIGHT_SUPPORT[node.node_def_id] == UOM_INDEX:
isy_data.aux_properties[Platform.SELECT].append((node, CMD_BACKLIGHT))
return
isy_data.aux_properties[Platform.NUMBER].append((node, CMD_BACKLIGHT)) |
Generate the device info for a root node device. | def _generate_device_info(node: Node) -> DeviceInfo:
"""Generate the device info for a root node device."""
isy = node.isy
device_info = DeviceInfo(
identifiers={(DOMAIN, f"{isy.uuid}_{node.address}")},
manufacturer=node.protocol.title(),
name=node.name,
via_device=(DOMAIN, isy.uuid),
configuration_url=isy.conn.url,
suggested_area=node.folder,
)
# ISYv5 Device Types can provide model and manufacturer
model: str = str(node.address).rpartition(" ")[0] or node.address
if node.node_def_id is not None:
model += f": {node.node_def_id}"
# Numerical Device Type
if node.type is not None:
model += f" ({node.type})"
# Get extra information for Z-Wave Devices
if (
node.protocol == PROTO_ZWAVE
and node.zwave_props
and node.zwave_props.mfr_id != "0"
):
device_info[ATTR_MANUFACTURER] = (
f"Z-Wave MfrID:{int(node.zwave_props.mfr_id):#0{6}x}"
)
model += (
f"Type:{int(node.zwave_props.prod_type_id):#0{6}x} "
f"Product:{int(node.zwave_props.product_id):#0{6}x}"
)
device_info[ATTR_MODEL] = model
return device_info |
Sort the nodes to their proper platforms. | def _categorize_nodes(
isy_data: IsyData, nodes: Nodes, ignore_identifier: str, sensor_identifier: str
) -> None:
"""Sort the nodes to their proper platforms."""
for path, node in nodes:
ignored = ignore_identifier in path or ignore_identifier in node.name
if ignored:
# Don't import this node as a device at all
continue
if hasattr(node, "parent_node") and node.parent_node is None:
# This is a physical device / parent node
isy_data.devices[node.address] = _generate_device_info(node)
isy_data.root_nodes[Platform.BUTTON].append(node)
# Any parent node can have communication errors:
isy_data.aux_properties[Platform.SENSOR].append((node, PROP_COMMS_ERROR))
# Add Ramp Rate and On Levels for Dimmable Load devices
if getattr(node, "is_dimmable", False):
aux_controls = ROOT_AUX_CONTROLS.intersection(node.aux_properties)
for control in aux_controls:
platform = NODE_AUX_FILTERS[control]
isy_data.aux_properties[platform].append((node, control))
if hasattr(node, TAG_ENABLED):
isy_data.aux_properties[Platform.SWITCH].append((node, TAG_ENABLED))
_add_backlight_if_supported(isy_data, node)
if node.protocol == PROTO_GROUP:
isy_data.nodes[ISY_GROUP_PLATFORM].append(node)
continue
if node.protocol == PROTO_INSTEON:
for control in node.aux_properties:
if control in SKIP_AUX_PROPS:
continue
isy_data.aux_properties[Platform.SENSOR].append((node, control))
if sensor_identifier in path or sensor_identifier in node.name:
# User has specified to treat this as a sensor. First we need to
# determine if it should be a binary_sensor.
if _is_sensor_a_binary_sensor(isy_data, node):
continue
isy_data.nodes[Platform.SENSOR].append(node)
continue
# We have a bunch of different methods for determining the device type,
# each of which works with different ISY firmware versions or device
# family. The order here is important, from most reliable to least.
if _check_for_node_def(isy_data, node):
continue
if _check_for_insteon_type(isy_data, node):
continue
if _check_for_zwave_cat(isy_data, node):
continue
if _check_for_uom_id(isy_data, node):
continue
if _check_for_states_in_uom(isy_data, node):
continue
# Fallback as as sensor, e.g. for un-sortable items like NodeServer nodes.
isy_data.nodes[Platform.SENSOR].append(node) |
Categorize the ISY programs. | def _categorize_programs(isy_data: IsyData, programs: Programs) -> None:
"""Categorize the ISY programs."""
for platform in PROGRAM_PLATFORMS:
folder = programs.get_by_name(f"{DEFAULT_PROGRAM_STRING}{platform}")
if not folder:
continue
for dtype, _, node_id in folder.children:
if dtype != TAG_FOLDER:
continue
entity_folder = folder[node_id]
actions = None
status = entity_folder.get_by_name(KEY_STATUS)
if not status or status.protocol != PROTO_PROGRAM:
_LOGGER.warning(
"Program %s entity '%s' not loaded, invalid/missing status program",
platform,
entity_folder.name,
)
continue
if platform != Platform.BINARY_SENSOR:
actions = entity_folder.get_by_name(KEY_ACTIONS)
if not actions or actions.protocol != PROTO_PROGRAM:
_LOGGER.warning(
(
"Program %s entity '%s' not loaded, invalid/missing actions"
" program"
),
platform,
entity_folder.name,
)
continue
entity = (entity_folder.name, status, actions)
isy_data.programs[platform].append(entity) |
Fix ISY Reported Values.
ISY provides float values as an integer and precision component.
Correct by shifting the decimal place left by the value of precision.
(e.g. value=2345, prec="2" == 23.45)
Insteon Thermostats report temperature in 0.5-deg precision as an int
by sending a value of 2 times the Temp. Correct by dividing by 2 here. | def convert_isy_value_to_hass(
value: float | None,
uom: str | None,
precision: int | str,
fallback_precision: int | None = None,
) -> float | int | None:
"""Fix ISY Reported Values.
ISY provides float values as an integer and precision component.
Correct by shifting the decimal place left by the value of precision.
(e.g. value=2345, prec="2" == 23.45)
Insteon Thermostats report temperature in 0.5-deg precision as an int
by sending a value of 2 times the Temp. Correct by dividing by 2 here.
"""
if value is None or value == ISY_VALUE_UNKNOWN:
return None
if uom in (UOM_DOUBLE_TEMP, UOM_ISYV4_DEGREES):
return round(float(value) / 2.0, 1)
if precision not in ("0", 0):
return cast(float, round(float(value) / 10 ** int(precision), int(precision)))
if fallback_precision:
return round(float(value), fallback_precision)
return value |
Create lock-specific services for the ISY Integration. | def async_setup_lock_services(hass: HomeAssistant) -> None:
"""Create lock-specific services for the ISY Integration."""
platform = async_get_current_platform()
platform.async_register_entity_service(
SERVICE_SET_ZWAVE_LOCK_USER_CODE,
SERVICE_SET_USER_CODE_SCHEMA,
"async_set_zwave_lock_user_code",
)
platform.async_register_entity_service(
SERVICE_DELETE_ZWAVE_LOCK_USER_CODE,
SERVICE_DELETE_USER_CODE_SCHEMA,
"async_delete_zwave_lock_user_code",
) |
Return a formatted ramp rate time string. | def time_string(i: int) -> str:
"""Return a formatted ramp rate time string."""
if i >= 60:
return f"{(float(i)/60):.1f} {UnitOfTime.MINUTES}"
return f"{i} {UnitOfTime.SECONDS}" |
Validate the command is valid. | def valid_isy_commands(value: Any) -> str:
"""Validate the command is valid."""
value = str(value).upper()
if value in COMMAND_FRIENDLY_NAME:
assert isinstance(value, str)
return value
raise vol.Invalid("Invalid ISY Command.") |
Get entities for a domain. | def async_get_entities(hass: HomeAssistant) -> dict[str, Entity]:
"""Get entities for a domain."""
entities: dict[str, Entity] = {}
for platform in async_get_platforms(hass, DOMAIN):
entities.update(platform.entities)
return entities |
Create and register services for the ISY integration. | def async_setup_services(hass: HomeAssistant) -> None:
"""Create and register services for the ISY integration."""
existing_services = hass.services.async_services_for_domain(DOMAIN)
if existing_services and SERVICE_SEND_PROGRAM_COMMAND in existing_services:
# Integration-level services have already been added. Return.
return
async def async_send_program_command_service_handler(service: ServiceCall) -> None:
"""Handle a send program command service call."""
address = service.data.get(CONF_ADDRESS)
name = service.data.get(CONF_NAME)
command = service.data[CONF_COMMAND]
isy_name = service.data.get(CONF_ISY)
for config_entry_id in hass.data[DOMAIN]:
isy_data = hass.data[DOMAIN][config_entry_id]
isy = isy_data.root
if isy_name and isy_name != isy.conf["name"]:
continue
program = None
if address:
program = isy.programs.get_by_id(address)
if name:
program = isy.programs.get_by_name(name)
if program is not None:
await getattr(program, command)()
return
_LOGGER.error("Could not send program command; not found or enabled on the ISY")
hass.services.async_register(
domain=DOMAIN,
service=SERVICE_SEND_PROGRAM_COMMAND,
service_func=async_send_program_command_service_handler,
schema=SERVICE_SEND_PROGRAM_COMMAND_SCHEMA,
)
async def _async_send_raw_node_command(call: ServiceCall) -> None:
await entity_service_call(
hass, async_get_entities(hass), "async_send_raw_node_command", call
)
hass.services.async_register(
domain=DOMAIN,
service=SERVICE_SEND_RAW_NODE_COMMAND,
schema=cv.make_entity_service_schema(SERVICE_SEND_RAW_NODE_COMMAND_SCHEMA),
service_func=_async_send_raw_node_command,
)
async def _async_send_node_command(call: ServiceCall) -> None:
await entity_service_call(
hass, async_get_entities(hass), "async_send_node_command", call
)
hass.services.async_register(
domain=DOMAIN,
service=SERVICE_SEND_NODE_COMMAND,
schema=cv.make_entity_service_schema(SERVICE_SEND_NODE_COMMAND_SCHEMA),
service_func=_async_send_node_command,
)
async def _async_get_zwave_parameter(call: ServiceCall) -> None:
await entity_service_call(
hass, async_get_entities(hass), "async_get_zwave_parameter", call
)
hass.services.async_register(
domain=DOMAIN,
service=SERVICE_GET_ZWAVE_PARAMETER,
schema=cv.make_entity_service_schema(SERVICE_GET_ZWAVE_PARAMETER_SCHEMA),
service_func=_async_get_zwave_parameter,
)
async def _async_set_zwave_parameter(call: ServiceCall) -> None:
await entity_service_call(
hass, async_get_entities(hass), "async_set_zwave_parameter", call
)
hass.services.async_register(
domain=DOMAIN,
service=SERVICE_SET_ZWAVE_PARAMETER,
schema=cv.make_entity_service_schema(SERVICE_SET_ZWAVE_PARAMETER_SCHEMA),
service_func=_async_set_zwave_parameter,
)
async def _async_rename_node(call: ServiceCall) -> None:
await entity_service_call(
hass, async_get_entities(hass), "async_rename_node", call
)
hass.services.async_register(
domain=DOMAIN,
service=SERVICE_RENAME_NODE,
schema=cv.make_entity_service_schema(SERVICE_RENAME_NODE_SCHEMA),
service_func=_async_rename_node,
) |
Unload services for the ISY integration. | def async_unload_services(hass: HomeAssistant) -> None:
"""Unload services for the ISY integration."""
if hass.data[DOMAIN]:
# There is still another config entry for this domain, don't remove services.
return
existing_services = hass.services.async_services_for_domain(DOMAIN)
if not existing_services or SERVICE_SEND_PROGRAM_COMMAND not in existing_services:
return
_LOGGER.info("Unloading ISY994 Services")
hass.services.async_remove(domain=DOMAIN, service=SERVICE_SEND_PROGRAM_COMMAND)
hass.services.async_remove(domain=DOMAIN, service=SERVICE_SEND_RAW_NODE_COMMAND)
hass.services.async_remove(domain=DOMAIN, service=SERVICE_SEND_NODE_COMMAND)
hass.services.async_remove(domain=DOMAIN, service=SERVICE_GET_ZWAVE_PARAMETER)
hass.services.async_remove(domain=DOMAIN, service=SERVICE_SET_ZWAVE_PARAMETER) |
Register system health callbacks. | def async_register(
hass: HomeAssistant, register: system_health.SystemHealthRegistration
) -> None:
"""Register system health callbacks."""
register.async_register_info(system_health_info) |
Remove extra entities that are no longer part of the integration. | def _async_cleanup_registry_entries(hass: HomeAssistant, entry_id: str) -> None:
"""Remove extra entities that are no longer part of the integration."""
entity_registry = er.async_get(hass)
isy_data = hass.data[DOMAIN][entry_id]
existing_entries = er.async_entries_for_config_entry(entity_registry, entry_id)
entities = {
(entity.domain, entity.unique_id): entity.entity_id
for entity in existing_entries
}
extra_entities = set(entities.keys()).difference(isy_data.unique_ids)
if not extra_entities:
return
for entity in extra_entities:
if entity_registry.async_is_registered(entities[entity]):
entity_registry.async_remove(entities[entity])
_LOGGER.debug(
("Cleaning up ISY entities: removed %s extra entities for config entry %s"),
len(extra_entities),
entry_id,
) |
Create device info for ISY service devices. | def _create_service_device_info(isy: ISY, name: str, unique_id: str) -> DeviceInfo:
"""Create device info for ISY service devices."""
return DeviceInfo(
identifiers={
(
DOMAIN,
f"{isy.uuid}_{unique_id}",
)
},
manufacturer=MANUFACTURER,
name=f"{isy.conf[ISY_CONF_NAME]} {name}",
model=isy.conf[ISY_CONF_MODEL],
sw_version=isy.conf[ISY_CONF_FIRMWARE],
configuration_url=isy.conn.url,
via_device=(DOMAIN, isy.uuid),
entry_type=DeviceEntryType.SERVICE,
) |
Set up the ITach connection and devices. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the ITach connection and devices."""
itachip2ir = pyitachip2ir.ITachIP2IR(
config.get(CONF_MAC), config[CONF_HOST], int(config[CONF_PORT])
)
if not itachip2ir.ready(CONNECT_TIMEOUT):
_LOGGER.error("Unable to find iTach")
return
devices = []
for data in config[CONF_DEVICES]:
name = data.get(CONF_NAME)
modaddr = int(data.get(CONF_MODADDR, DEFAULT_MODADDR))
connaddr = int(data.get(CONF_CONNADDR, DEFAULT_CONNADDR))
ir_count = int(data.get(CONF_IR_COUNT, DEFAULT_IR_COUNT))
cmddatas = ""
for cmd in data.get(CONF_COMMANDS):
cmdname = cmd[CONF_NAME].strip()
if not cmdname:
cmdname = '""'
cmddata = cmd[CONF_DATA].strip()
if not cmddata:
cmddata = '""'
cmddatas += f"{cmdname}\n{cmddata}\n"
itachip2ir.addDevice(name, modaddr, connaddr, cmddatas)
devices.append(ITachIP2IRRemote(itachip2ir, name, ir_count))
add_entities(devices, True) |
Set up the iTunes platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the iTunes platform."""
add_entities(
[
ItunesDevice(
config.get(CONF_NAME),
config.get(CONF_HOST),
config.get(CONF_PORT),
config[CONF_SSL],
add_entities,
)
]
) |
Fetch item from Jellyfin server. | def fetch_item(client: JellyfinClient, item_id: str) -> dict[str, Any] | None:
"""Fetch item from Jellyfin server."""
result = client.jellyfin.get_item(item_id)
if not result:
return None
item: dict[str, Any] = result
return item |
Fetch items from Jellyfin server. | def fetch_items(
client: JellyfinClient,
params: dict[str, Any],
) -> list[dict[str, Any]] | None:
"""Fetch items from Jellyfin server."""
result = client.jellyfin.user_items(params=params)
if not result or "Items" not in result or len(result["Items"]) < 1:
return None
items: list[dict[str, Any]] = result["Items"]
return [
item
for item in items
if not item.get("IsFolder")
or (item.get("IsFolder") and item.get("ChildCount", 1) > 0)
] |
Create a new Jellyfin client. | def create_client(device_id: str, device_name: str | None = None) -> JellyfinClient:
"""Create a new Jellyfin client."""
if device_name is None:
device_name = socket.gethostname()
jellyfin = Jellyfin()
client = jellyfin.get_client()
client.config.app(USER_APP_NAME, CLIENT_VERSION, device_name, device_id)
client.config.http(USER_AGENT)
return client |
Connect to the Jellyfin server and assert that the user can login. | def _connect(
client: JellyfinClient, url: str, username: str, password: str
) -> tuple[str, dict[str, Any]]:
"""Connect to the Jellyfin server and assert that the user can login."""
client.config.data["auth.ssl"] = url.startswith("https")
connect_result = _connect_to_address(client.auth, url)
_login(client.auth, url, username, password)
return (_get_user_id(client.jellyfin), connect_result) |
Connect to the Jellyfin server. | def _connect_to_address(
connection_manager: ConnectionManager, url: str
) -> dict[str, Any]:
"""Connect to the Jellyfin server."""
result: dict[str, Any] = connection_manager.connect_to_address(url)
if result["State"] != CONNECTION_STATE["ServerSignIn"]:
raise CannotConnect
return result |
Assert that the user can log in to the Jellyfin server. | def _login(
connection_manager: ConnectionManager,
url: str,
username: str,
password: str,
) -> None:
"""Assert that the user can log in to the Jellyfin server."""
response = connection_manager.login(url, username, password)
if "AccessToken" not in response:
raise InvalidAuth |
Set the unique userid from a Jellyfin server. | def _get_user_id(api: API) -> str:
"""Set the unique userid from a Jellyfin server."""
settings: dict[str, Any] = api.get_user_settings()
userid: str = settings["Id"]
return userid |
Find a suitable thumbnail for an item. | def get_artwork_url(
client: JellyfinClient, item: dict[str, Any], max_width: int = 600
) -> str | None:
"""Find a suitable thumbnail for an item."""
artwork_id: str = item["Id"]
artwork_type = "Primary"
parent_backdrop_id: str | None = item.get("ParentBackdropItemId")
if "Backdrop" in item[ITEM_KEY_IMAGE_TAGS]:
artwork_type = "Backdrop"
elif parent_backdrop_id:
artwork_type = "Backdrop"
artwork_id = parent_backdrop_id
elif "Primary" not in item[ITEM_KEY_IMAGE_TAGS]:
return None
return str(client.jellyfin.artwork(artwork_id, artwork_type, max_width)) |
Generate a random UUID4 string to identify ourselves. | def _generate_client_device_id() -> str:
"""Generate a random UUID4 string to identify ourselves."""
return random_uuid_hex() |
Return the mime type of a media item. | def _media_mime_type(media_item: dict[str, Any]) -> str | None:
"""Return the mime type of a media item."""
if not media_item.get(ITEM_KEY_MEDIA_SOURCES):
_LOGGER.debug("Unable to determine mime type for item without media source")
return None
media_source = media_item[ITEM_KEY_MEDIA_SOURCES][0]
if MEDIA_SOURCE_KEY_PATH not in media_source:
_LOGGER.debug("Unable to determine mime type for media source without path")
return None
path = media_source[MEDIA_SOURCE_KEY_PATH]
mime_type, _ = mimetypes.guess_type(path)
if mime_type is None:
_LOGGER.debug(
"Unable to determine mime type for path %s", os.path.basename(path)
)
return mime_type |
Count the number of now playing. | def _count_now_playing(data: JellyfinDataT) -> int:
"""Count the number of now playing."""
session_ids = [
sid for (sid, session) in data.items() if "NowPlayingItem" in session
]
return len(session_ids) |
Create a prefix for unique ids. | def get_unique_prefix(
location: Location,
language: str,
candle_lighting_offset: int | None,
havdalah_offset: int | None,
) -> str:
"""Create a prefix for unique ids."""
config_properties = [
location.latitude,
location.longitude,
location.timezone,
location.altitude,
location.diaspora,
language,
candle_lighting_offset,
havdalah_offset,
]
prefix = "_".join(map(str, config_properties))
return f"{prefix}" |
Get the Join notification service. | def get_service(
hass: HomeAssistant,
config: ConfigType,
discovery_info: DiscoveryInfoType | None = None,
) -> JoinNotificationService | None:
"""Get the Join notification service."""
api_key = config.get(CONF_API_KEY)
device_id = config.get(CONF_DEVICE_ID)
device_ids = config.get(CONF_DEVICE_IDS)
device_names = config.get(CONF_DEVICE_NAMES)
if api_key and not get_devices(api_key):
_LOGGER.error("Error connecting to Join. Check the API key")
return None
if device_id is None and device_ids is None and device_names is None:
_LOGGER.error(
"No device was provided. Please specify device_id"
", device_ids, or device_names"
)
return None
return JoinNotificationService(api_key, device_id, device_ids, device_names) |
Register services for each join device listed. | def register_device(hass, api_key, name, device_id, device_ids, device_names):
"""Register services for each join device listed."""
def ring_service(service: ServiceCall) -> None:
"""Service to ring devices."""
ring_device(
api_key=api_key,
device_id=device_id,
device_ids=device_ids,
device_names=device_names,
)
def set_wallpaper_service(service: ServiceCall) -> None:
"""Service to set wallpaper on devices."""
set_wallpaper(
api_key=api_key,
device_id=device_id,
device_ids=device_ids,
device_names=device_names,
url=service.data.get("url"),
)
def send_file_service(service: ServiceCall) -> None:
"""Service to send files to devices."""
send_file(
api_key=api_key,
device_id=device_id,
device_ids=device_ids,
device_names=device_names,
url=service.data.get("url"),
)
def send_url_service(service: ServiceCall) -> None:
"""Service to open url on devices."""
send_url(
api_key=api_key,
device_id=device_id,
device_ids=device_ids,
device_names=device_names,
url=service.data.get("url"),
)
def send_tasker_service(service: ServiceCall) -> None:
"""Service to open url on devices."""
send_notification(
api_key=api_key,
device_id=device_id,
device_ids=device_ids,
device_names=device_names,
text=service.data.get("command"),
)
def send_sms_service(service: ServiceCall) -> None:
"""Service to send sms from devices."""
send_sms(
device_id=device_id,
device_ids=device_ids,
device_names=device_names,
sms_number=service.data.get("number"),
sms_text=service.data.get("message"),
api_key=api_key,
)
hass.services.register(DOMAIN, f"{name}ring", ring_service)
hass.services.register(DOMAIN, f"{name}set_wallpaper", set_wallpaper_service)
hass.services.register(DOMAIN, f"{name}send_sms", send_sms_service)
hass.services.register(DOMAIN, f"{name}send_file", send_file_service)
hass.services.register(DOMAIN, f"{name}send_url", send_url_service)
hass.services.register(DOMAIN, f"{name}send_tasker", send_tasker_service) |
Set up the Join services. | def setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Join services."""
for device in config[DOMAIN]:
api_key = device.get(CONF_API_KEY)
device_id = device.get(CONF_DEVICE_ID)
device_ids = device.get(CONF_DEVICE_IDS)
device_names = device.get(CONF_DEVICE_NAMES)
name = device.get(CONF_NAME)
name = f"{name.lower().replace(' ', '_')}_" if name else ""
if api_key and not get_devices(api_key):
_LOGGER.error("Error connecting to Join, check API key")
return False
if device_id is None and device_ids is None and device_names is None:
_LOGGER.error(
"No device was provided. Please specify device_id"
", device_ids, or device_names"
)
return False
register_device(hass, api_key, name, device_id, device_ids, device_names)
return True |
Set up Kankun Wifi switches. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities_callback: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up Kankun Wifi switches."""
switches = config.get("switches", {})
devices = []
for dev_name, properties in switches.items():
devices.append(
KankunSwitch(
hass,
properties.get(CONF_NAME, dev_name),
properties.get(CONF_HOST),
properties.get(CONF_PORT, DEFAULT_PORT),
properties.get(CONF_PATH, DEFAULT_PATH),
properties.get(CONF_USERNAME),
properties.get(CONF_PASSWORD),
)
)
add_entities_callback(devices) |
Update tracked device state from the hub. | def update_items(router: KeeneticRouter, async_add_entities, tracked: set[str]):
"""Update tracked device state from the hub."""
new_tracked: list[KeeneticTracker] = []
for mac, device in router.last_devices.items():
if mac not in tracked:
tracked.add(mac)
new_tracked.append(KeeneticTracker(device, router))
async_add_entities(new_tracked) |
Populate default options. | def async_add_defaults(hass: HomeAssistant, entry: ConfigEntry):
"""Populate default options."""
host: str = entry.data[CONF_HOST]
imported_options: dict = hass.data[DOMAIN].get(f"imported_options_{host}", {})
options = {
CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
CONF_CONSIDER_HOME: DEFAULT_CONSIDER_HOME,
CONF_INTERFACES: [DEFAULT_INTERFACE],
CONF_TRY_HOTSPOT: True,
CONF_INCLUDE_ARP: True,
CONF_INCLUDE_ASSOCIATED: True,
**imported_options,
**entry.options,
}
if options.keys() - entry.options.keys():
hass.config_entries.async_update_entry(entry, options=options) |
Get the 'mode' used to retrieve the MAC address. | def get_ip_mode(host):
"""Get the 'mode' used to retrieve the MAC address."""
try:
ip_address = ipaddress.ip_address(host)
except ValueError:
return "hostname"
if ip_address.version == 6:
return "ip6"
return "ip" |
Convert a device key to an entity key. | def device_key_to_bluetooth_entity_key(
device_key: DeviceKey,
) -> PassiveBluetoothEntityKey:
"""Convert a device key to an entity key."""
return PassiveBluetoothEntityKey(device_key.key, device_key.device_id) |
Convert a sensor update to a bluetooth data update. | def sensor_update_to_bluetooth_data_update(
sensor_update: SensorUpdate,
) -> PassiveBluetoothDataUpdate:
"""Convert a sensor update to a bluetooth data update."""
return PassiveBluetoothDataUpdate(
devices={
device_id: sensor_device_info_to_hass_device_info(device_info)
for device_id, device_info in sensor_update.devices.items()
},
entity_descriptions={
device_key_to_bluetooth_entity_key(device_key): SENSOR_DESCRIPTIONS[
description.device_class
]
for device_key, description in sensor_update.entity_descriptions.items()
if description.device_class
},
entity_data={
device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
for device_key, sensor_values in sensor_update.entity_values.items()
},
entity_names={
device_key_to_bluetooth_entity_key(device_key): sensor_values.name
for device_key, sensor_values in sensor_update.entity_values.items()
},
) |
Listen for keyboard events. | def setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Listen for keyboard events."""
keyboard = PyKeyboard()
keyboard.special_key_assignment()
hass.services.register(
DOMAIN,
SERVICE_VOLUME_UP,
lambda service: keyboard.tap_key(keyboard.volume_up_key),
schema=TAP_KEY_SCHEMA,
)
hass.services.register(
DOMAIN,
SERVICE_VOLUME_DOWN,
lambda service: keyboard.tap_key(keyboard.volume_down_key),
schema=TAP_KEY_SCHEMA,
)
hass.services.register(
DOMAIN,
SERVICE_VOLUME_MUTE,
lambda service: keyboard.tap_key(keyboard.volume_mute_key),
schema=TAP_KEY_SCHEMA,
)
hass.services.register(
DOMAIN,
SERVICE_MEDIA_PLAY_PAUSE,
lambda service: keyboard.tap_key(keyboard.media_play_pause_key),
schema=TAP_KEY_SCHEMA,
)
hass.services.register(
DOMAIN,
SERVICE_MEDIA_NEXT_TRACK,
lambda service: keyboard.tap_key(keyboard.media_next_track_key),
schema=TAP_KEY_SCHEMA,
)
hass.services.register(
DOMAIN,
SERVICE_MEDIA_PREVIOUS_TRACK,
lambda service: keyboard.tap_key(keyboard.media_prev_track_key),
schema=TAP_KEY_SCHEMA,
)
return True |
Convert a Bluetooth address to a short address. | def short_address(address: str) -> str:
"""Convert a Bluetooth address to a short address."""
results = address.replace("-", ":").split(":")
return f"{results[0].upper()}{results[1].upper()}"[0:4] |
Get the name from a discovery. | def name_from_discovery(discovery: MicroBotAdvertisement) -> str:
"""Get the name from a discovery."""
return f'{discovery.data["local_name"]} {short_address(discovery.address)}' |
Set up the Kira platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Kira platform."""
if discovery_info:
name = discovery_info.get(CONF_NAME)
device = discovery_info.get(CONF_DEVICE)
kira = hass.data[DOMAIN][CONF_REMOTE][name]
add_entities([KiraRemote(device, kira)]) |
Set up a Kira sensor. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up a Kira sensor."""
if discovery_info is not None:
name = discovery_info.get(CONF_NAME)
device = discovery_info.get(CONF_DEVICE)
kira = hass.data[DOMAIN][CONF_SENSOR][name]
add_entities([KiraReceiver(device, kira)]) |
Load KIRA codes from specified file. | def load_codes(path):
"""Load KIRA codes from specified file."""
codes = []
if os.path.exists(path):
with open(path, encoding="utf8") as code_file:
data = yaml.safe_load(code_file) or []
for code in data:
try:
codes.append(CODE_SCHEMA(code))
except VoluptuousError as exception:
# keep going
_LOGGER.warning("KIRA code invalid data: %s", exception)
else:
with open(path, "w", encoding="utf8") as code_file:
code_file.write("")
return codes |
Set up the KIRA component. | def setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the KIRA component."""
sensors = config.get(DOMAIN, {}).get(CONF_SENSORS, [])
remotes = config.get(DOMAIN, {}).get(CONF_REMOTES, [])
# If no sensors or remotes were specified, add a sensor
if not (sensors or remotes):
sensors.append({})
codes = load_codes(hass.config.path(CODES_YAML))
hass.data[DOMAIN] = {CONF_SENSOR: {}, CONF_REMOTE: {}}
def load_module(platform, idx, module_conf):
"""Set up the KIRA module and load platform."""
# note: module_name is not the HA device name. it's just a unique name
# to ensure the component and platform can share information
module_name = ("%s_%d" % (DOMAIN, idx)) if idx else DOMAIN
device_name = module_conf.get(CONF_NAME, DOMAIN)
port = module_conf.get(CONF_PORT, DEFAULT_PORT)
host = module_conf.get(CONF_HOST, DEFAULT_HOST)
if platform == CONF_SENSOR:
module = pykira.KiraReceiver(host, port)
module.start()
else:
module = pykira.KiraModule(host, port)
hass.data[DOMAIN][platform][module_name] = module
for code in codes:
code_tuple = (code.get(CONF_NAME), code.get(CONF_DEVICE, STATE_UNKNOWN))
module.registerCode(code_tuple, code.get(CONF_CODE))
discovery.load_platform(
hass, platform, DOMAIN, {"name": module_name, "device": device_name}, config
)
for idx, module_conf in enumerate(sensors):
load_module(CONF_SENSOR, idx, module_conf)
for idx, module_conf in enumerate(remotes):
load_module(CONF_REMOTE, idx, module_conf)
def _stop_kira(_event):
"""Stop the KIRA receiver."""
for receiver in hass.data[DOMAIN][CONF_SENSOR].values():
receiver.stop()
_LOGGER.info("Terminated receivers")
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _stop_kira)
return True |
Create a device. | def async_create_device(
hass: HomeAssistant,
config_entry_id: str,
device_name: str | None,
device_translation_key: str | None,
device_translation_placeholders: dict[str, str] | None,
unique_id: str,
) -> dr.DeviceEntry:
"""Create a device."""
device_registry = dr.async_get(hass)
return device_registry.async_get_or_create(
config_entry_id=config_entry_id,
identifiers={(DOMAIN, unique_id)},
name=device_name,
translation_key=device_translation_key,
translation_placeholders=device_translation_placeholders,
) |
Create some issue registry issues. | def _create_issues(hass: HomeAssistant) -> None:
"""Create some issue registry issues."""
async_create_issue(
hass,
DOMAIN,
"transmogrifier_deprecated",
breaks_in_ha_version="2023.1.1",
is_fixable=False,
learn_more_url="https://en.wiktionary.org/wiki/transmogrifier",
severity=IssueSeverity.WARNING,
translation_key="transmogrifier_deprecated",
)
async_create_issue(
hass,
DOMAIN,
"out_of_blinker_fluid",
breaks_in_ha_version="2023.1.1",
is_fixable=True,
learn_more_url="https://www.youtube.com/watch?v=b9rntRxLlbU",
severity=IssueSeverity.CRITICAL,
translation_key="out_of_blinker_fluid",
)
async_create_issue(
hass,
DOMAIN,
"unfixable_problem",
is_fixable=False,
learn_more_url="https://www.youtube.com/watch?v=dQw4w9WgXcQ",
severity=IssueSeverity.WARNING,
translation_key="unfixable_problem",
)
async_create_issue(
hass,
DOMAIN,
"bad_psu",
is_fixable=True,
learn_more_url="https://www.youtube.com/watch?v=b9rntRxLlbU",
severity=IssueSeverity.CRITICAL,
translation_key="bad_psu",
)
async_create_issue(
hass,
DOMAIN,
"cold_tea",
is_fixable=True,
severity=IssueSeverity.WARNING,
translation_key="cold_tea",
) |
Set up the KIWI lock platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the KIWI lock platform."""
try:
kiwi = KiwiClient(config[CONF_USERNAME], config[CONF_PASSWORD])
except KiwiException as exc:
_LOGGER.error(exc)
return
if not (available_locks := kiwi.get_locks()):
# No locks found; abort setup routine.
_LOGGER.info("No KIWI locks found in your account")
return
add_entities([KiwiLock(lock, kiwi) for lock in available_locks], True) |
Return a KNX Climate device to be used within XKNX. | def _create_climate(xknx: XKNX, config: ConfigType) -> XknxClimate:
"""Return a KNX Climate device to be used within XKNX."""
climate_mode = XknxClimateMode(
xknx,
name=f"{config[CONF_NAME]} Mode",
group_address_operation_mode=config.get(
ClimateSchema.CONF_OPERATION_MODE_ADDRESS
),
group_address_operation_mode_state=config.get(
ClimateSchema.CONF_OPERATION_MODE_STATE_ADDRESS
),
group_address_controller_status=config.get(
ClimateSchema.CONF_CONTROLLER_STATUS_ADDRESS
),
group_address_controller_status_state=config.get(
ClimateSchema.CONF_CONTROLLER_STATUS_STATE_ADDRESS
),
group_address_controller_mode=config.get(
ClimateSchema.CONF_CONTROLLER_MODE_ADDRESS
),
group_address_controller_mode_state=config.get(
ClimateSchema.CONF_CONTROLLER_MODE_STATE_ADDRESS
),
group_address_operation_mode_protection=config.get(
ClimateSchema.CONF_OPERATION_MODE_FROST_PROTECTION_ADDRESS
),
group_address_operation_mode_night=config.get(
ClimateSchema.CONF_OPERATION_MODE_NIGHT_ADDRESS
),
group_address_operation_mode_comfort=config.get(
ClimateSchema.CONF_OPERATION_MODE_COMFORT_ADDRESS
),
group_address_operation_mode_standby=config.get(
ClimateSchema.CONF_OPERATION_MODE_STANDBY_ADDRESS
),
group_address_heat_cool=config.get(ClimateSchema.CONF_HEAT_COOL_ADDRESS),
group_address_heat_cool_state=config.get(
ClimateSchema.CONF_HEAT_COOL_STATE_ADDRESS
),
operation_modes=config.get(ClimateSchema.CONF_OPERATION_MODES),
controller_modes=config.get(ClimateSchema.CONF_CONTROLLER_MODES),
)
return XknxClimate(
xknx,
name=config[CONF_NAME],
group_address_temperature=config[ClimateSchema.CONF_TEMPERATURE_ADDRESS],
group_address_target_temperature=config.get(
ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS
),
group_address_target_temperature_state=config[
ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS
],
group_address_setpoint_shift=config.get(
ClimateSchema.CONF_SETPOINT_SHIFT_ADDRESS
),
group_address_setpoint_shift_state=config.get(
ClimateSchema.CONF_SETPOINT_SHIFT_STATE_ADDRESS
),
setpoint_shift_mode=config.get(ClimateSchema.CONF_SETPOINT_SHIFT_MODE),
setpoint_shift_max=config[ClimateSchema.CONF_SETPOINT_SHIFT_MAX],
setpoint_shift_min=config[ClimateSchema.CONF_SETPOINT_SHIFT_MIN],
temperature_step=config[ClimateSchema.CONF_TEMPERATURE_STEP],
group_address_on_off=config.get(ClimateSchema.CONF_ON_OFF_ADDRESS),
group_address_on_off_state=config.get(ClimateSchema.CONF_ON_OFF_STATE_ADDRESS),
on_off_invert=config[ClimateSchema.CONF_ON_OFF_INVERT],
group_address_active_state=config.get(ClimateSchema.CONF_ACTIVE_STATE_ADDRESS),
group_address_command_value_state=config.get(
ClimateSchema.CONF_COMMAND_VALUE_STATE_ADDRESS
),
min_temp=config.get(ClimateSchema.CONF_MIN_TEMP),
max_temp=config.get(ClimateSchema.CONF_MAX_TEMP),
mode=climate_mode,
) |
Return a XKNX DateTime object to be used within XKNX. | def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateTime:
"""Return a XKNX DateTime object to be used within XKNX."""
return XknxDateTime(
xknx,
name=config[CONF_NAME],
broadcast_type="DATE",
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
) |
Return a XKNX DateTime object to be used within XKNX. | def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateTime:
"""Return a XKNX DateTime object to be used within XKNX."""
return XknxDateTime(
xknx,
name=config[CONF_NAME],
broadcast_type="DATETIME",
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
) |
Create exposures from config. | def create_knx_exposure(
hass: HomeAssistant, xknx: XKNX, config: ConfigType
) -> KNXExposeSensor | KNXExposeTime:
"""Create exposures from config."""
expose_type = config[ExposeSchema.CONF_KNX_EXPOSE_TYPE]
exposure: KNXExposeSensor | KNXExposeTime
if (
isinstance(expose_type, str)
and expose_type.lower() in ExposeSchema.EXPOSE_TIME_TYPES
):
exposure = KNXExposeTime(
xknx=xknx,
config=config,
)
else:
exposure = KNXExposeSensor(
hass,
xknx=xknx,
config=config,
)
return exposure |
Return a KNX Light device to be used within XKNX. | def _create_light(xknx: XKNX, config: ConfigType) -> XknxLight:
"""Return a KNX Light device to be used within XKNX."""
def individual_color_addresses(color: str, feature: str) -> Any | None:
"""Load individual color address list from configuration structure."""
if (
LightSchema.CONF_INDIVIDUAL_COLORS not in config
or color not in config[LightSchema.CONF_INDIVIDUAL_COLORS]
):
return None
return config[LightSchema.CONF_INDIVIDUAL_COLORS][color].get(feature)
group_address_tunable_white = None
group_address_tunable_white_state = None
group_address_color_temp = None
group_address_color_temp_state = None
color_temperature_type = ColorTemperatureType.UINT_2_BYTE
if config[LightSchema.CONF_COLOR_TEMP_MODE] == ColorTempModes.RELATIVE:
group_address_tunable_white = config.get(LightSchema.CONF_COLOR_TEMP_ADDRESS)
group_address_tunable_white_state = config.get(
LightSchema.CONF_COLOR_TEMP_STATE_ADDRESS
)
else:
# absolute uint or float
group_address_color_temp = config.get(LightSchema.CONF_COLOR_TEMP_ADDRESS)
group_address_color_temp_state = config.get(
LightSchema.CONF_COLOR_TEMP_STATE_ADDRESS
)
if config[LightSchema.CONF_COLOR_TEMP_MODE] == ColorTempModes.ABSOLUTE_FLOAT:
color_temperature_type = ColorTemperatureType.FLOAT_2_BYTE
return XknxLight(
xknx,
name=config[CONF_NAME],
group_address_switch=config.get(KNX_ADDRESS),
group_address_switch_state=config.get(LightSchema.CONF_STATE_ADDRESS),
group_address_brightness=config.get(LightSchema.CONF_BRIGHTNESS_ADDRESS),
group_address_brightness_state=config.get(
LightSchema.CONF_BRIGHTNESS_STATE_ADDRESS
),
group_address_color=config.get(LightSchema.CONF_COLOR_ADDRESS),
group_address_color_state=config.get(LightSchema.CONF_COLOR_STATE_ADDRESS),
group_address_rgbw=config.get(LightSchema.CONF_RGBW_ADDRESS),
group_address_rgbw_state=config.get(LightSchema.CONF_RGBW_STATE_ADDRESS),
group_address_hue=config.get(LightSchema.CONF_HUE_ADDRESS),
group_address_hue_state=config.get(LightSchema.CONF_HUE_STATE_ADDRESS),
group_address_saturation=config.get(LightSchema.CONF_SATURATION_ADDRESS),
group_address_saturation_state=config.get(
LightSchema.CONF_SATURATION_STATE_ADDRESS
),
group_address_xyy_color=config.get(LightSchema.CONF_XYY_ADDRESS),
group_address_xyy_color_state=config.get(LightSchema.CONF_XYY_STATE_ADDRESS),
group_address_tunable_white=group_address_tunable_white,
group_address_tunable_white_state=group_address_tunable_white_state,
group_address_color_temperature=group_address_color_temp,
group_address_color_temperature_state=group_address_color_temp_state,
group_address_switch_red=individual_color_addresses(
LightSchema.CONF_RED, KNX_ADDRESS
),
group_address_switch_red_state=individual_color_addresses(
LightSchema.CONF_RED, LightSchema.CONF_STATE_ADDRESS
),
group_address_brightness_red=individual_color_addresses(
LightSchema.CONF_RED, LightSchema.CONF_BRIGHTNESS_ADDRESS
),
group_address_brightness_red_state=individual_color_addresses(
LightSchema.CONF_RED, LightSchema.CONF_BRIGHTNESS_STATE_ADDRESS
),
group_address_switch_green=individual_color_addresses(
LightSchema.CONF_GREEN, KNX_ADDRESS
),
group_address_switch_green_state=individual_color_addresses(
LightSchema.CONF_GREEN, LightSchema.CONF_STATE_ADDRESS
),
group_address_brightness_green=individual_color_addresses(
LightSchema.CONF_GREEN, LightSchema.CONF_BRIGHTNESS_ADDRESS
),
group_address_brightness_green_state=individual_color_addresses(
LightSchema.CONF_GREEN, LightSchema.CONF_BRIGHTNESS_STATE_ADDRESS
),
group_address_switch_blue=individual_color_addresses(
LightSchema.CONF_BLUE, KNX_ADDRESS
),
group_address_switch_blue_state=individual_color_addresses(
LightSchema.CONF_BLUE, LightSchema.CONF_STATE_ADDRESS
),
group_address_brightness_blue=individual_color_addresses(
LightSchema.CONF_BLUE, LightSchema.CONF_BRIGHTNESS_ADDRESS
),
group_address_brightness_blue_state=individual_color_addresses(
LightSchema.CONF_BLUE, LightSchema.CONF_BRIGHTNESS_STATE_ADDRESS
),
group_address_switch_white=individual_color_addresses(
LightSchema.CONF_WHITE, KNX_ADDRESS
),
group_address_switch_white_state=individual_color_addresses(
LightSchema.CONF_WHITE, LightSchema.CONF_STATE_ADDRESS
),
group_address_brightness_white=individual_color_addresses(
LightSchema.CONF_WHITE, LightSchema.CONF_BRIGHTNESS_ADDRESS
),
group_address_brightness_white_state=individual_color_addresses(
LightSchema.CONF_WHITE, LightSchema.CONF_BRIGHTNESS_STATE_ADDRESS
),
color_temperature_type=color_temperature_type,
min_kelvin=config[LightSchema.CONF_MIN_KELVIN],
max_kelvin=config[LightSchema.CONF_MAX_KELVIN],
) |
Return a KNX Notification to be used within XKNX. | def _create_notification_instance(xknx: XKNX, config: ConfigType) -> XknxNotification:
"""Return a KNX Notification to be used within XKNX."""
return XknxNotification(
xknx,
name=config[CONF_NAME],
group_address=config[KNX_ADDRESS],
value_type=config[CONF_TYPE],
) |
Return a KNX NumericValue to be used within XKNX. | def _create_numeric_value(xknx: XKNX, config: ConfigType) -> NumericValue:
"""Return a KNX NumericValue to be used within XKNX."""
return NumericValue(
xknx,
name=config[CONF_NAME],
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
value_type=config[CONF_TYPE],
) |
Convert GroupAddress dict value into GroupAddressInfo instance. | def _create_group_address_info(ga_model: GroupAddressModel) -> GroupAddressInfo:
"""Convert GroupAddress dict value into GroupAddressInfo instance."""
dpt = ga_model["dpt"]
transcoder = DPTBase.transcoder_by_dpt(dpt["main"], dpt.get("sub")) if dpt else None
return GroupAddressInfo(
address=ga_model["address"],
name=ga_model["name"],
description=ga_model["description"],
transcoder=transcoder,
dpt_main=dpt["main"] if dpt else None,
dpt_sub=dpt["sub"] if dpt else None,
) |
Create issue for notify service deprecation. | def migrate_notify_issue(hass: HomeAssistant) -> None:
"""Create issue for notify service deprecation."""
ir.async_create_issue(
hass,
DOMAIN,
"migrate_notify",
breaks_in_ha_version="2024.11.0",
issue_domain=Platform.NOTIFY.value,
is_fixable=True,
is_persistent=True,
translation_key="migrate_notify",
severity=ir.IssueSeverity.WARNING,
) |
Validate a number entity configurations dependent on configured value type. | def number_limit_sub_validator(entity_config: OrderedDict) -> OrderedDict:
"""Validate a number entity configurations dependent on configured value type."""
value_type = entity_config[CONF_TYPE]
min_config: float | None = entity_config.get(NumberSchema.CONF_MIN)
max_config: float | None = entity_config.get(NumberSchema.CONF_MAX)
step_config: float | None = entity_config.get(NumberSchema.CONF_STEP)
dpt_class = DPTNumeric.parse_transcoder(value_type)
if dpt_class is None:
raise vol.Invalid(f"'type: {value_type}' is not a valid numeric sensor type.")
# Infinity is not supported by Home Assistant frontend so user defined
# config is required if if xknx DPTNumeric subclass defines it as limit.
if min_config is None and dpt_class.value_min == float("-inf"):
raise vol.Invalid(f"'min' key required for value type '{value_type}'")
if min_config is not None and min_config < dpt_class.value_min:
raise vol.Invalid(
f"'min: {min_config}' undercuts possible minimum"
f" of value type '{value_type}': {dpt_class.value_min}"
)
if max_config is None and dpt_class.value_max == float("inf"):
raise vol.Invalid(f"'max' key required for value type '{value_type}'")
if max_config is not None and max_config > dpt_class.value_max:
raise vol.Invalid(
f"'max: {max_config}' exceeds possible maximum"
f" of value type '{value_type}': {dpt_class.value_max}"
)
if step_config is not None and step_config < dpt_class.resolution:
raise vol.Invalid(
f"'step: {step_config}' undercuts possible minimum step"
f" of value type '{value_type}': {dpt_class.resolution}"
)
return entity_config |
Validate a button entity payload configuration. | def button_payload_sub_validator(entity_config: OrderedDict) -> OrderedDict:
"""Validate a button entity payload configuration."""
if _type := entity_config.get(CONF_TYPE):
_payload = entity_config[ButtonSchema.CONF_VALUE]
if (transcoder := DPTBase.parse_transcoder(_type)) is None:
raise vol.Invalid(f"'type: {_type}' is not a valid sensor type.")
entity_config[CONF_PAYLOAD_LENGTH] = transcoder.payload_length
try:
_dpt_payload = transcoder.to_knx(_payload)
_raw_payload = transcoder.validate_payload(_dpt_payload)
except (ConversionError, CouldNotParseTelegram) as ex:
raise vol.Invalid(
f"'payload: {_payload}' not valid for 'type: {_type}'"
) from ex
entity_config[CONF_PAYLOAD] = int.from_bytes(_raw_payload, byteorder="big")
return entity_config
_payload = entity_config[CONF_PAYLOAD]
_payload_length = entity_config[CONF_PAYLOAD_LENGTH]
if _payload > (max_payload := _max_payload_value(_payload_length)):
raise vol.Invalid(
f"'payload: {_payload}' exceeds possible maximum for "
f"payload_length {_payload_length}: {max_payload}"
)
return entity_config |
Validate a select entity options configuration. | def select_options_sub_validator(entity_config: OrderedDict) -> OrderedDict:
"""Validate a select entity options configuration."""
options_seen = set()
payloads_seen = set()
payload_length = entity_config[CONF_PAYLOAD_LENGTH]
for opt in entity_config[SelectSchema.CONF_OPTIONS]:
option = opt[SelectSchema.CONF_OPTION]
payload = opt[CONF_PAYLOAD]
if payload > (max_payload := _max_payload_value(payload_length)):
raise vol.Invalid(
f"'payload: {payload}' for 'option: {option}' exceeds possible"
f" maximum of 'payload_length: {payload_length}': {max_payload}"
)
if option in options_seen:
raise vol.Invalid(f"duplicate item for 'option' not allowed: {option}")
options_seen.add(option)
if payload in payloads_seen:
raise vol.Invalid(f"duplicate item for 'payload' not allowed: {payload}")
payloads_seen.add(payload)
return entity_config |
Return a KNX RawValue to be used within XKNX. | def _create_raw_value(xknx: XKNX, config: ConfigType) -> RawValue:
"""Return a KNX RawValue to be used within XKNX."""
return RawValue(
xknx,
name=config[CONF_NAME],
payload_length=config[CONF_PAYLOAD_LENGTH],
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
) |
Return a KNX sensor to be used within XKNX. | def _create_sensor(xknx: XKNX, config: ConfigType) -> XknxSensor:
"""Return a KNX sensor to be used within XKNX."""
return XknxSensor(
xknx,
name=config[CONF_NAME],
group_address_state=config[SensorSchema.CONF_STATE_ADDRESS],
sync_state=config[SensorSchema.CONF_SYNC_STATE],
always_callback=config[SensorSchema.CONF_ALWAYS_CALLBACK],
value_type=config[CONF_TYPE],
) |
Register KNX integration services. | def register_knx_services(hass: HomeAssistant) -> None:
"""Register KNX integration services."""
hass.services.async_register(
DOMAIN,
SERVICE_KNX_SEND,
partial(service_send_to_knx_bus, hass),
schema=SERVICE_KNX_SEND_SCHEMA,
)
hass.services.async_register(
DOMAIN,
SERVICE_KNX_READ,
partial(service_read_to_knx_bus, hass),
schema=SERVICE_KNX_READ_SCHEMA,
)
async_register_admin_service(
hass,
DOMAIN,
SERVICE_KNX_EVENT_REGISTER,
partial(service_event_register_modify, hass),
schema=SERVICE_KNX_EVENT_REGISTER_SCHEMA,
)
async_register_admin_service(
hass,
DOMAIN,
SERVICE_KNX_EXPOSURE_REGISTER,
partial(service_exposure_register_modify, hass),
schema=SERVICE_KNX_EXPOSURE_REGISTER_SCHEMA,
)
async_register_admin_service(
hass,
DOMAIN,
SERVICE_RELOAD,
partial(service_reload_integration, hass),
) |
Return KNXModule instance. | def get_knx_module(hass: HomeAssistant) -> KNXModule:
"""Return KNXModule instance."""
try:
return hass.data[DOMAIN] # type: ignore[no-any-return]
except KeyError as err:
raise HomeAssistantError("KNX entry not loaded") from err |
Return a KNX Notification to be used within XKNX. | def _create_notification(xknx: XKNX, config: ConfigType) -> XknxNotification:
"""Return a KNX Notification to be used within XKNX."""
return XknxNotification(
xknx,
name=config[CONF_NAME],
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
value_type=config[CONF_TYPE],
) |
Return a XKNX DateTime object to be used within XKNX. | def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateTime:
"""Return a XKNX DateTime object to be used within XKNX."""
return XknxDateTime(
xknx,
name=config[CONF_NAME],
broadcast_type="TIME",
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
) |
Validate that value is parsable as given sensor type. | def dpt_subclass_validator(dpt_base_class: type[DPTBase]) -> Callable[[Any], str | int]:
"""Validate that value is parsable as given sensor type."""
def dpt_value_validator(value: Any) -> str | int:
"""Validate that value is parsable as sensor type."""
if (
isinstance(value, (str, int))
and dpt_base_class.parse_transcoder(value) is not None
):
return value
raise vol.Invalid(
f"type '{value}' is not a valid DPT identifier for"
f" {dpt_base_class.__name__}."
)
return dpt_value_validator |
Validate that value is parsable as GroupAddress or InternalGroupAddress. | def ga_validator(value: Any) -> str | int:
"""Validate that value is parsable as GroupAddress or InternalGroupAddress."""
if not isinstance(value, (str, int)):
raise vol.Invalid(
f"'{value}' is not a valid KNX group address: Invalid type '{type(value).__name__}'"
)
try:
parse_device_group_address(value)
except CouldNotParseAddress as exc:
raise vol.Invalid(
f"'{value}' is not a valid KNX group address: {exc.message}"
) from exc
return value |
Validate that value is parsable as IPv4 address.
Optionally check if address is in a reserved multicast block or is explicitly not. | def ip_v4_validator(value: Any, multicast: bool | None = None) -> str:
"""Validate that value is parsable as IPv4 address.
Optionally check if address is in a reserved multicast block or is explicitly not.
"""
try:
address = ipaddress.IPv4Address(value)
except ipaddress.AddressValueError as ex:
raise vol.Invalid(f"value '{value}' is not a valid IPv4 address: {ex}") from ex
if multicast is not None and address.is_multicast != multicast:
raise vol.Invalid(
f"value '{value}' is not a valid IPv4"
f" {'multicast' if multicast else 'unicast'} address"
)
return str(address) |
Return a KNX weather device to be used within XKNX. | def _create_weather(xknx: XKNX, config: ConfigType) -> XknxWeather:
"""Return a KNX weather device to be used within XKNX."""
return XknxWeather(
xknx,
name=config[CONF_NAME],
sync_state=config[WeatherSchema.CONF_SYNC_STATE],
group_address_temperature=config[WeatherSchema.CONF_KNX_TEMPERATURE_ADDRESS],
group_address_brightness_south=config.get(
WeatherSchema.CONF_KNX_BRIGHTNESS_SOUTH_ADDRESS
),
group_address_brightness_east=config.get(
WeatherSchema.CONF_KNX_BRIGHTNESS_EAST_ADDRESS
),
group_address_brightness_west=config.get(
WeatherSchema.CONF_KNX_BRIGHTNESS_WEST_ADDRESS
),
group_address_brightness_north=config.get(
WeatherSchema.CONF_KNX_BRIGHTNESS_NORTH_ADDRESS
),
group_address_wind_speed=config.get(WeatherSchema.CONF_KNX_WIND_SPEED_ADDRESS),
group_address_wind_bearing=config.get(
WeatherSchema.CONF_KNX_WIND_BEARING_ADDRESS
),
group_address_rain_alarm=config.get(WeatherSchema.CONF_KNX_RAIN_ALARM_ADDRESS),
group_address_frost_alarm=config.get(
WeatherSchema.CONF_KNX_FROST_ALARM_ADDRESS
),
group_address_wind_alarm=config.get(WeatherSchema.CONF_KNX_WIND_ALARM_ADDRESS),
group_address_day_night=config.get(WeatherSchema.CONF_KNX_DAY_NIGHT_ADDRESS),
group_address_air_pressure=config.get(
WeatherSchema.CONF_KNX_AIR_PRESSURE_ADDRESS
),
group_address_humidity=config.get(WeatherSchema.CONF_KNX_HUMIDITY_ADDRESS),
) |
Handle get info command. | def ws_info(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict,
) -> None:
"""Handle get info command."""
knx: KNXModule = hass.data[DOMAIN]
_project_info = None
if project_info := knx.project.info:
_project_info = {
"name": project_info["name"],
"last_modified": project_info["last_modified"],
"tool_version": project_info["tool_version"],
"xknxproject_version": project_info["xknxproject_version"],
}
connection.send_result(
msg["id"],
{
"version": knx.xknx.version,
"connected": knx.xknx.connection_manager.connected.is_set(),
"current_address": str(knx.xknx.current_address),
"project": _project_info,
},
) |
Handle get info command of group monitor. | def ws_group_monitor_info(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict,
) -> None:
"""Handle get info command of group monitor."""
knx: KNXModule = hass.data[DOMAIN]
recent_telegrams = [*knx.telegrams.recent_telegrams]
connection.send_result(
msg["id"],
{
"project_loaded": knx.project.loaded,
"recent_telegrams": recent_telegrams,
},
) |
Subscribe to incoming and outgoing KNX telegrams. | def ws_subscribe_telegram(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict,
) -> None:
"""Subscribe to incoming and outgoing KNX telegrams."""
knx: KNXModule = hass.data[DOMAIN]
@callback
def forward_telegram(telegram: TelegramDict) -> None:
"""Forward telegram to websocket subscription."""
connection.send_event(
msg["id"],
telegram,
)
connection.subscriptions[msg["id"]] = knx.telegrams.async_listen_telegram(
action=forward_telegram,
name="KNX GroupMonitor subscription",
)
connection.send_result(msg["id"]) |
Content filter for media sources. | def media_source_content_filter(item: BrowseMedia) -> bool:
"""Content filter for media sources."""
# Filter out cameras using PNG over MJPEG. They don't work in Kodi.
return not (
item.media_content_id.startswith("media-source://camera/")
and item.media_content_type == "image/png"
) |
Search existing config entries for one matching the host. | def find_matching_config_entries_for_host(hass, host):
"""Search existing config entries for one matching the host."""
for entry in hass.config_entries.async_entries(DOMAIN):
if entry.data[CONF_HOST] == host:
return entry
return None |
Catch command exceptions. | def cmd(
func: Callable[Concatenate[_KodiEntityT, _P], Awaitable[Any]],
) -> Callable[Concatenate[_KodiEntityT, _P], Coroutine[Any, Any, None]]:
"""Catch command exceptions."""
@wraps(func)
async def wrapper(obj: _KodiEntityT, *args: _P.args, **kwargs: _P.kwargs) -> None:
"""Wrap all command methods."""
try:
await func(obj, *args, **kwargs)
except (TransportError, ProtocolError) as exc:
# If Kodi is off, we expect calls to fail.
if obj.state == MediaPlayerState.OFF:
log_function = _LOGGER.debug
else:
log_function = _LOGGER.error
log_function(
"Error calling %s on entity %s: %r",
func.__name__,
obj.entity_id,
exc,
)
return wrapper |
Check if valid pin and coerce to string. | def ensure_pin(value):
"""Check if valid pin and coerce to string."""
if value is None:
raise vol.Invalid("pin value is None")
if PIN_TO_ZONE.get(str(value)) is None:
raise vol.Invalid("pin not valid")
return str(value) |
Check if valid zone and coerce to string. | def ensure_zone(value):
"""Check if valid zone and coerce to string."""
if value is None:
raise vol.Invalid("zone value is None")
if str(value) not in ZONES:
raise vol.Invalid("zone not valid")
return str(value) |
Validate zones and reformat for import. | def import_device_validator(config):
"""Validate zones and reformat for import."""
config = copy.deepcopy(config)
io_cfgs = {}
# Replace pins with zones
for conf_platform, conf_io in (
(CONF_BINARY_SENSORS, CONF_IO_BIN),
(CONF_SENSORS, CONF_IO_DIG),
(CONF_SWITCHES, CONF_IO_SWI),
):
for zone in config.get(conf_platform, []):
if zone.get(CONF_PIN):
zone[CONF_ZONE] = PIN_TO_ZONE[zone[CONF_PIN]]
del zone[CONF_PIN]
io_cfgs[zone[CONF_ZONE]] = conf_io
# Migrate config_entry data into default_options structure
config[CONF_IO] = io_cfgs
config[CONF_DEFAULT_OPTIONS] = OPTIONS_SCHEMA(config)
# clean up fields migrated to options
config.pop(CONF_BINARY_SENSORS, None)
config.pop(CONF_SENSORS, None)
config.pop(CONF_SWITCHES, None)
config.pop(CONF_BLINK, None)
config.pop(CONF_DISCOVERY, None)
config.pop(CONF_API_HOST, None)
config.pop(CONF_IO, None)
return config |
Reformat for import. | def import_validator(config):
"""Reformat for import."""
config = copy.deepcopy(config)
# push api_host into device configs
for device in config.get(CONF_DEVICES, []):
device[CONF_API_HOST] = config.get(CONF_API_HOST, "")
return config |
Create the device name for a given tracked asset pair. | def create_device_name(tracked_asset_pair: str) -> str:
"""Create the device name for a given tracked asset pair."""
return f"{tracked_asset_pair.split('/')[0]} {tracked_asset_pair.split('/')[1]}" |
Get a list of tradable asset pairs. | def get_tradable_asset_pairs(kraken_api: KrakenAPI) -> dict[str, str]:
"""Get a list of tradable asset pairs."""
tradable_asset_pairs = {}
asset_pairs_df = kraken_api.get_tradable_asset_pairs()
for pair in zip(
asset_pairs_df.index.values, asset_pairs_df["wsname"], strict=False
):
# Remove darkpools
# https://support.kraken.com/hc/en-us/articles/360001391906-Introducing-the-Kraken-Dark-Pool
if not pair[0].endswith(".d"):
tradable_asset_pairs[pair[1]] = pair[0]
return tradable_asset_pairs |
Set up the KWB component. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the KWB component."""
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
device = config.get(CONF_DEVICE)
connection_type = config.get(CONF_TYPE)
raw = config.get(CONF_RAW)
client_name = config.get(CONF_NAME)
if connection_type == "serial":
easyfire = kwb.KWBEasyfire(MODE_SERIAL, "", 0, device)
elif connection_type == "tcp":
easyfire = kwb.KWBEasyfire(MODE_TCP, host, port)
else:
return
easyfire.run_thread()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, lambda event: easyfire.stop_thread())
add_entities(
KWBSensor(easyfire, sensor, client_name)
for sensor in easyfire.get_sensors()
if (sensor.sensor_type != kwb.PROP_SENSOR_RAW)
or (sensor.sensor_type == kwb.PROP_SENSOR_RAW and raw)
) |
Set up the LaCrosse sensors. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the LaCrosse sensors."""
usb_device: str = config[CONF_DEVICE]
baud: int = config[CONF_BAUD]
expire_after: int | None = config.get(CONF_EXPIRE_AFTER)
_LOGGER.debug("%s %s", usb_device, baud)
try:
lacrosse = pylacrosse.LaCrosse(usb_device, baud)
lacrosse.open()
except SerialException as exc:
_LOGGER.warning("Unable to open serial port: %s", exc)
return
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, lambda event: lacrosse.close())
if CONF_JEELINK_LED in config:
lacrosse.led_mode_state(config.get(CONF_JEELINK_LED))
if CONF_FREQUENCY in config:
lacrosse.set_frequency(config.get(CONF_FREQUENCY))
if CONF_DATARATE in config:
lacrosse.set_datarate(config.get(CONF_DATARATE))
if CONF_TOGGLE_INTERVAL in config:
lacrosse.set_toggle_interval(config.get(CONF_TOGGLE_INTERVAL))
if CONF_TOGGLE_MASK in config:
lacrosse.set_toggle_mask(config.get(CONF_TOGGLE_MASK))
lacrosse.start_scan()
sensors: list[LaCrosseSensor] = []
for device, device_config in config[CONF_SENSORS].items():
_LOGGER.debug("%s %s", device, device_config)
typ: str = device_config[CONF_TYPE]
sensor_class = TYPE_CLASSES[typ]
name: str = device_config.get(CONF_NAME, device)
sensors.append(
sensor_class(hass, lacrosse, device, name, expire_after, device_config)
)
add_entities(sensors) |
Get the value of a sensor field. | def get_value(sensor: Sensor, field: str) -> float | int | str | None:
"""Get the value of a sensor field."""
field_data = sensor.data.get(field)
if field_data is None:
return None
value = field_data["values"][-1]["s"]
try:
value = float(value)
except ValueError:
return str(value) # handle non-numericals
return int(value) if value.is_integer() else value |
Decorate LaMetric calls to handle LaMetric exceptions.
A decorator that wraps the passed in function, catches LaMetric errors,
and handles the availability of the device in the data coordinator. | def lametric_exception_handler(
func: Callable[Concatenate[_LaMetricEntityT, _P], Coroutine[Any, Any, Any]],
) -> Callable[Concatenate[_LaMetricEntityT, _P], Coroutine[Any, Any, None]]:
"""Decorate LaMetric calls to handle LaMetric exceptions.
A decorator that wraps the passed in function, catches LaMetric errors,
and handles the availability of the device in the data coordinator.
"""
async def handler(
self: _LaMetricEntityT, *args: _P.args, **kwargs: _P.kwargs
) -> None:
try:
await func(self, *args, **kwargs)
self.coordinator.async_update_listeners()
except LaMetricConnectionError as error:
self.coordinator.last_update_success = False
self.coordinator.async_update_listeners()
raise HomeAssistantError(
"Error communicating with the LaMetric device"
) from error
except LaMetricError as error:
raise HomeAssistantError(
"Invalid response from the LaMetric device"
) from error
return handler |
Get the LaMetric coordinator for this device ID. | def async_get_coordinator_by_device_id(
hass: HomeAssistant, device_id: str
) -> LaMetricDataUpdateCoordinator:
"""Get the LaMetric coordinator for this device ID."""
device_registry = dr.async_get(hass)
if (device_entry := device_registry.async_get(device_id)) is None:
raise ValueError(f"Unknown LaMetric device ID: {device_id}")
for entry_id in device_entry.config_entries:
if (
(entry := hass.config_entries.async_get_entry(entry_id))
and entry.domain == DOMAIN
and entry.entry_id in hass.data[DOMAIN]
):
coordinator: LaMetricDataUpdateCoordinator = hass.data[DOMAIN][
entry.entry_id
]
return coordinator
raise ValueError(f"No coordinator for device ID: {device_id}") |
Set up services for the LaMetric integration. | def async_setup_services(hass: HomeAssistant) -> None:
"""Set up services for the LaMetric integration."""
async def _async_service_chart(call: ServiceCall) -> None:
"""Send a chart to a LaMetric device."""
coordinator = async_get_coordinator_by_device_id(
hass, call.data[CONF_DEVICE_ID]
)
await async_send_notification(
coordinator, call, [Chart(data=call.data[CONF_DATA])]
)
async def _async_service_message(call: ServiceCall) -> None:
"""Send a message to a LaMetric device."""
coordinator = async_get_coordinator_by_device_id(
hass, call.data[CONF_DEVICE_ID]
)
await async_send_notification(
coordinator,
call,
[
Simple(
icon=call.data.get(CONF_ICON),
text=call.data[CONF_MESSAGE],
)
],
)
hass.services.async_register(
DOMAIN,
SERVICE_CHART,
_async_service_chart,
schema=SERVICE_CHART_SCHEMA,
)
hass.services.async_register(
DOMAIN,
SERVICE_MESSAGE,
_async_service_message,
schema=SERVICE_MESSAGE_SCHEMA,
) |
Get the Lannouncer notification service. | def get_service(
hass: HomeAssistant,
config: ConfigType,
discovery_info: DiscoveryInfoType | None = None,
) -> LannouncerNotificationService:
"""Get the Lannouncer notification service."""
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
return LannouncerNotificationService(hass, host, port) |
Get and validate lastFM User. | def get_lastfm_user(api_key: str, username: str) -> tuple[User, dict[str, str]]:
"""Get and validate lastFM User."""
user = LastFMNetwork(api_key=api_key).get_user(username)
errors = {}
try:
user.get_playcount()
except WSError as error:
if error.details == "User not found":
errors["base"] = "invalid_account"
elif (
error.details
== "Invalid API key - You must be granted a valid key by last.fm"
):
errors["base"] = "invalid_auth"
else:
errors["base"] = "unknown"
except Exception: # pylint:disable=broad-except
errors["base"] = "unknown"
return user, errors |
Validate list of users. Return tuple of valid users and errors. | def validate_lastfm_users(
api_key: str, usernames: list[str]
) -> tuple[list[str], dict[str, str]]:
"""Validate list of users. Return tuple of valid users and errors."""
valid_users = []
errors = {}
for username in usernames:
_, lastfm_errors = get_lastfm_user(api_key, username)
if lastfm_errors:
errors = lastfm_errors
else:
valid_users.append(username)
return valid_users, errors |
Format the track. | def format_track(track: Track | None) -> str | None:
"""Format the track."""
if track is None:
return None
return f"{track.artist} - {track.title}" |
Set up an entity for this domain. | def create_lcn_binary_sensor_entity(
hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry
) -> LcnEntity:
"""Set up an entity for this domain."""
device_connection = get_device_connection(
hass, entity_config[CONF_ADDRESS], config_entry
)
if entity_config[CONF_DOMAIN_DATA][CONF_SOURCE] in SETPOINTS:
return LcnRegulatorLockSensor(
entity_config, config_entry.entry_id, device_connection
)
if entity_config[CONF_DOMAIN_DATA][CONF_SOURCE] in BINSENSOR_PORTS:
return LcnBinarySensor(entity_config, config_entry.entry_id, device_connection)
# in KEY
return LcnLockKeysSensor(entity_config, config_entry.entry_id, device_connection) |
Set up an entity for this domain. | def create_lcn_climate_entity(
hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry
) -> LcnEntity:
"""Set up an entity for this domain."""
device_connection = get_device_connection(
hass, entity_config[CONF_ADDRESS], config_entry
)
return LcnClimate(entity_config, config_entry.entry_id, device_connection) |
Check config entries for already configured entries based on the ip address/port. | def get_config_entry(hass: HomeAssistant, data: ConfigType) -> ConfigEntry | None:
"""Check config entries for already configured entries based on the ip address/port."""
return next(
(
entry
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.data[CONF_IP_ADDRESS] == data[CONF_IP_ADDRESS]
and entry.data[CONF_PORT] == data[CONF_PORT]
),
None,
) |
Set up an entity for this domain. | def create_lcn_cover_entity(
hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry
) -> LcnEntity:
"""Set up an entity for this domain."""
device_connection = get_device_connection(
hass, entity_config[CONF_ADDRESS], config_entry
)
if entity_config[CONF_DOMAIN_DATA][CONF_MOTOR] in "OUTPUTS":
return LcnOutputsCover(entity_config, config_entry.entry_id, device_connection)
# in RELAYS
return LcnRelayCover(entity_config, config_entry.entry_id, device_connection) |
Return a lcn device_connection. | def get_device_connection(
hass: HomeAssistant, address: AddressType, config_entry: ConfigEntry
) -> DeviceConnectionType | None:
"""Return a lcn device_connection."""
host_connection = hass.data[DOMAIN][config_entry.entry_id][CONNECTION]
addr = pypck.lcn_addr.LcnAddr(*address)
return host_connection.get_address_conn(addr) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.