| response (string, 1–33.1k chars) | instruction (string, 22–582k chars) |
|---|---|
def async_name(
    service_info: bluetooth.BluetoothServiceInfoBleak,
    ibeacon_advertisement: iBeaconAdvertisement,
    unique_address: bool = False,
) -> str:
    """Return a name for the device."""
    adv = ibeacon_advertisement
    advertised_name = service_info.name
    # When the advertised name is really just the MAC address (possibly with
    # dashes instead of colons), build an identifier from the iBeacon fields.
    if service_info.address in (advertised_name, advertised_name.replace("-", ":")):
        base_name = f"{adv.uuid}_{adv.major}_{adv.minor}"
    else:
        base_name = advertised_name
    if not unique_address:
        return base_name
    short_address = make_short_address(service_info.address)
    # Avoid appending the short address twice.
    if base_name.upper().endswith(short_address):
        return base_name
    return f"{base_name} {short_address}"
def _async_dispatch_update(
    hass: HomeAssistant,
    device_id: str,
    service_info: bluetooth.BluetoothServiceInfoBleak,
    ibeacon_advertisement: iBeaconAdvertisement,
    new: bool,
    unique_address: bool,
) -> None:
    """Dispatch an update.

    New devices are announced on the global SIGNAL_IBEACON_DEVICE_NEW signal
    together with a generated name; already-known devices only get their
    per-device "seen" signal with the latest advertisement.
    """
    if new:
        async_dispatcher_send(
            hass,
            SIGNAL_IBEACON_DEVICE_NEW,
            device_id,
            async_name(service_info, ibeacon_advertisement, unique_address),
            ibeacon_advertisement,
        )
        return
    async_dispatcher_send(
        hass,
        signal_seen(device_id),
        ibeacon_advertisement,
    )
def add_entities(account: IcloudAccount, async_add_entities, tracked):
    """Add new tracker entities from the account."""
    entities = []
    for dev_id, device in account.devices.items():
        # Skip devices that are already tracked or have no known location.
        if dev_id in tracked or device.location is None:
            continue
        tracked.add(dev_id)
        entities.append(IcloudTrackerEntity(account, device))
    # True requests an initial update before the entities are added.
    async_add_entities(entities, True)
def icon_for_icloud_device(icloud_device: IcloudDevice) -> str:
    """Return an icon for the device."""
    icons_by_class = {
        "iPad": "mdi:tablet",
        "iPhone": "mdi:cellphone",
        "iPod": "mdi:ipod",
        "iMac": "mdi:monitor",
        "MacBookPro": "mdi:laptop",
    }
    # Unknown device classes fall back to a generic device icon.
    return icons_by_class.get(icloud_device.device_class, "mdi:cellphone-link")
def add_entities(account, async_add_entities, tracked):
    """Add new tracker entities from the account."""
    entities = []
    for dev_id, device in account.devices.items():
        # Only add devices that report a battery level and are not yet tracked.
        if dev_id in tracked or device.battery_level is None:
            continue
        tracked.add(dev_id)
        entities.append(IcloudDeviceBatterySensor(account, device))
    # True requests an initial update before the entities are added.
    async_add_entities(entities, True)
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the IDTECK proximity card component.

    Creates and connects one reader per configured unit. The whole component
    setup is aborted on the first unit that fails to connect.
    """
    conf = config[DOMAIN]
    for unit in conf:
        host = unit[CONF_HOST]
        port = unit[CONF_PORT]
        name = unit[CONF_NAME]
        try:
            reader = IdteckReader(hass, host, port, name)
            reader.connect()
            # Tear down the reader connection when Home Assistant stops.
            hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, reader.stop)
        except OSError as error:
            _LOGGER.error("Error creating %s. %s", name, error)
            return False
    return True
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up a control panel managed through IFTTT."""
    # Shared list of all IFTTT alarm panels so the push-state service below
    # can address panels created by any platform setup.
    if DATA_IFTTT_ALARM not in hass.data:
        hass.data[DATA_IFTTT_ALARM] = []
    name: str = config[CONF_NAME]
    code: str | None = config.get(CONF_CODE)
    code_arm_required: bool = config[CONF_CODE_ARM_REQUIRED]
    event_away: str = config[CONF_EVENT_AWAY]
    event_home: str = config[CONF_EVENT_HOME]
    event_night: str = config[CONF_EVENT_NIGHT]
    event_disarm: str = config[CONF_EVENT_DISARM]
    optimistic: bool = config[CONF_OPTIMISTIC]
    alarmpanel = IFTTTAlarmPanel(
        name,
        code,
        code_arm_required,
        event_away,
        event_home,
        event_night,
        event_disarm,
        optimistic,
    )
    hass.data[DATA_IFTTT_ALARM].append(alarmpanel)
    add_entities([alarmpanel])

    async def push_state_update(service: ServiceCall) -> None:
        """Set the service state as device state attribute."""
        entity_ids = service.data.get(ATTR_ENTITY_ID)
        state = service.data.get(ATTR_STATE)
        devices = hass.data[DATA_IFTTT_ALARM]
        # Restrict the update to the requested entities, if any were given.
        if entity_ids:
            devices = [d for d in devices if d.entity_id in entity_ids]
        for device in devices:
            device.push_alarm_state(state)
            device.async_schedule_update_ha_state()

    # NOTE(review): this runs on every platform setup; presumably
    # re-registering the same service handler is harmless — confirm.
    hass.services.register(
        DOMAIN,
        SERVICE_PUSH_ALARM_STATE,
        push_state_update,
        schema=PUSH_ALARM_STATE_SERVICE_SCHEMA,
    )
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the iGlo lights."""
    lamp = IGloLamp(
        config.get(CONF_NAME), config.get(CONF_HOST), config.get(CONF_PORT)
    )
    # True requests an initial state update before the entity is added.
    add_entities([lamp], True)
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the IGN Sismologia Feed platform."""
    scan_interval: timedelta = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)
    # Fall back to the Home Assistant home location when no coordinates
    # are configured.
    coordinates: tuple[float, float] = (
        config.get(CONF_LATITUDE, hass.config.latitude),
        config.get(CONF_LONGITUDE, hass.config.longitude),
    )
    radius_in_km: float = config[CONF_RADIUS]
    minimum_magnitude: float = config[CONF_MINIMUM_MAGNITUDE]
    # Initialize the entity manager.
    feed = IgnSismologiaFeedEntityManager(
        hass, add_entities, scan_interval, coordinates, radius_in_km, minimum_magnitude
    )

    def start_feed_manager(event: Event) -> None:
        """Start feed manager."""
        feed.startup()

    # Defer the first feed update until Home Assistant has fully started.
    hass.bus.listen_once(EVENT_HOMEASSISTANT_START, start_feed_manager)
def autosetup_ihc_products(hass: HomeAssistant, config, ihc_controller, controller_id):
    """Auto setup of IHC products from the IHC project file.

    Reads the project XML from the controller, matches products against the
    auto-setup YAML rules, and loads each platform with the discovered
    devices. Returns False when the project cannot be read or the auto-setup
    rules fail validation.
    """
    if not (project_xml := ihc_controller.get_project()):
        _LOGGER.error("Unable to read project from IHC controller")
        return False
    project = ElementTree.fromstring(project_xml)
    # If an auto setup file exist in the configuration it will override
    yaml_path = hass.config.path(AUTO_SETUP_YAML)
    if not os.path.isfile(yaml_path):
        # Fall back to the auto-setup file bundled with this integration.
        yaml_path = os.path.join(os.path.dirname(__file__), AUTO_SETUP_YAML)
    yaml = load_yaml_config_file(yaml_path)
    try:
        auto_setup_conf = AUTO_SETUP_SCHEMA(yaml)
    except vol.Invalid as exception:
        _LOGGER.error("Invalid IHC auto setup data: %s", exception)
        return False
    groups = project.findall(".//group")
    for platform in IHC_PLATFORMS:
        platform_setup = auto_setup_conf[platform]
        discovery_info = get_discovery_info(platform_setup, groups, controller_id)
        if discovery_info:
            discovery.load_platform(hass, platform, DOMAIN, discovery_info, config)
    return True
def get_discovery_info(platform_setup, groups, controller_id):
    """Get discovery info for specified IHC platform.

    Walks every project group, matches products against each platform rule's
    XPath, and returns a mapping of generated device name -> device info.
    """
    discovery_data = {}
    for group in groups:
        groupname = group.attrib["name"]
        for product_cfg in platform_setup:
            products = group.findall(product_cfg[CONF_XPATH])
            for product in products:
                # Ids in the project file carry a leading "_"; base 0 lets
                # int() auto-detect the base (e.g. hex "0x..." ids).
                product_id = int(product.attrib["id"].strip("_"), 0)
                nodes = product.findall(product_cfg[CONF_NODE])
                for node in nodes:
                    # Skip resources flagged as settings.
                    if "setting" in node.attrib and node.attrib["setting"] == "yes":
                        continue
                    ihc_id = int(node.attrib["id"].strip("_"), 0)
                    name = f"{groupname}_{ihc_id}"
                    # make the model number look a bit nicer - strip leading _
                    model = product.get("product_identifier", "").lstrip("_")
                    device = {
                        "ihc_id": ihc_id,
                        "ctrl_id": controller_id,
                        "product": {
                            "id": product_id,
                            "name": product.get("name") or "",
                            "note": product.get("note") or "",
                            "position": product.get("position") or "",
                            "model": model,
                            "group": groupname,
                        },
                        "product_cfg": product_cfg,
                    }
                    discovery_data[name] = device
    return discovery_data
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the IHC binary sensor platform."""
    # This platform is only set up through discovery by the IHC component.
    if discovery_info is None:
        return
    devices = []
    for name, device in discovery_info.items():
        ihc_id = device["ihc_id"]
        product_cfg = device["product_cfg"]
        product = device["product"]
        # Find controller that corresponds with device id
        controller_id = device["ctrl_id"]
        ihc_controller: IHCController = hass.data[DOMAIN][controller_id][IHC_CONTROLLER]
        sensor = IHCBinarySensor(
            ihc_controller,
            controller_id,
            name,
            ihc_id,
            product_cfg.get(CONF_TYPE),
            product_cfg[CONF_INVERTING],
            product,
        )
        devices.append(sensor)
    add_entities(devices)
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the IHC lights platform."""
    # This platform is only set up through discovery by the IHC component.
    if discovery_info is None:
        return
    devices = []
    for name, device in discovery_info.items():
        ihc_id = device["ihc_id"]
        product_cfg = device["product_cfg"]
        product = device["product"]
        # Find controller that corresponds with device id
        controller_id = device["ctrl_id"]
        ihc_controller: IHCController = hass.data[DOMAIN][controller_id][IHC_CONTROLLER]
        # Optional separate resources for turning the light off/on.
        ihc_off_id = product_cfg.get(CONF_OFF_ID)
        ihc_on_id = product_cfg.get(CONF_ON_ID)
        dimmable = product_cfg[CONF_DIMMABLE]
        light = IhcLight(
            ihc_controller,
            controller_id,
            name,
            ihc_id,
            ihc_off_id,
            ihc_on_id,
            dimmable,
            product,
        )
        devices.append(light)
    add_entities(devices)
def validate_name(config):
    """Ensure the device config has a name, deriving one from the IHC id."""
    # A missing name is defaulted to "ihc_<id>".
    if CONF_NAME not in config:
        config[CONF_NAME] = f"ihc_{config[CONF_ID]}"
    return config
def get_manual_configuration(
    hass: HomeAssistant,
    config: ConfigType,
    controller_conf: ConfigType,
    controller_id: str,
) -> None:
    """Get manual configuration for IHC devices.

    Builds discovery info from the manually configured devices of each IHC
    platform and loads the platform when anything was configured for it.
    """
    for platform in IHC_PLATFORMS:
        discovery_info = {}
        if platform in controller_conf:
            platform_setup = controller_conf.get(platform, {})
            for sensor_cfg in platform_setup:
                name = sensor_cfg[CONF_NAME]
                device = {
                    "ihc_id": sensor_cfg[CONF_ID],
                    "ctrl_id": controller_id,
                    "product": {
                        "name": name,
                        "note": sensor_cfg.get(CONF_NOTE) or "",
                        "position": sensor_cfg.get(CONF_POSITION) or "",
                    },
                    # All per-platform options are passed through; each
                    # platform picks the keys that apply to it.
                    "product_cfg": {
                        "type": sensor_cfg.get(CONF_TYPE),
                        "inverting": sensor_cfg.get(CONF_INVERTING),
                        "off_id": sensor_cfg.get(CONF_OFF_ID),
                        "on_id": sensor_cfg.get(CONF_ON_ID),
                        "dimmable": sensor_cfg.get(CONF_DIMMABLE),
                        "unit_of_measurement": sensor_cfg.get(CONF_UNIT_OF_MEASUREMENT),
                    },
                }
                discovery_info[name] = device
        if discovery_info:
            discovery.load_platform(hass, platform, DOMAIN, discovery_info, config)
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the IHC sensor platform."""
    # This platform is only set up through discovery by the IHC component.
    if discovery_info is None:
        return
    devices = []
    for name, device in discovery_info.items():
        ihc_id = device["ihc_id"]
        product_cfg = device["product_cfg"]
        product = device["product"]
        # Find controller that corresponds with device id
        controller_id = device["ctrl_id"]
        ihc_controller: IHCController = hass.data[DOMAIN][controller_id][IHC_CONTROLLER]
        unit = product_cfg[CONF_UNIT_OF_MEASUREMENT]
        sensor = IHCSensor(ihc_controller, controller_id, name, ihc_id, unit, product)
        devices.append(sensor)
    add_entities(devices)
def setup_service_functions(hass: HomeAssistant) -> None:
    """Set up the IHC service functions."""

    def _get_controller(call):
        # Resolve the user-facing controller index to the stored controller.
        controller_index = call.data[ATTR_CONTROLLER_ID]
        for controller_id in hass.data[DOMAIN]:
            controller_conf = hass.data[DOMAIN][controller_id]
            if controller_conf[IHC_CONTROLLER_INDEX] == controller_index:
                return controller_conf[IHC_CONTROLLER]
        # if not found the controller_index is out of range
        raise ValueError("The controller index is out of range")

    async def async_set_runtime_value_bool(call):
        """Set a IHC runtime bool value service function."""
        ihc_id = call.data[ATTR_IHC_ID]
        value = call.data[ATTR_VALUE]
        ihc_controller = _get_controller(call)
        await async_set_bool(hass, ihc_controller, ihc_id, value)

    async def async_set_runtime_value_int(call):
        """Set a IHC runtime integer value service function."""
        ihc_id = call.data[ATTR_IHC_ID]
        value = call.data[ATTR_VALUE]
        ihc_controller = _get_controller(call)
        await async_set_int(hass, ihc_controller, ihc_id, value)

    async def async_set_runtime_value_float(call):
        """Set a IHC runtime float value service function."""
        ihc_id = call.data[ATTR_IHC_ID]
        value = call.data[ATTR_VALUE]
        ihc_controller = _get_controller(call)
        await async_set_float(hass, ihc_controller, ihc_id, value)

    async def async_pulse_runtime_input(call):
        """Pulse a IHC controller input function."""
        ihc_id = call.data[ATTR_IHC_ID]
        ihc_controller = _get_controller(call)
        await async_pulse(hass, ihc_controller, ihc_id)

    hass.services.async_register(
        DOMAIN,
        SERVICE_SET_RUNTIME_VALUE_BOOL,
        async_set_runtime_value_bool,
        schema=SET_RUNTIME_VALUE_BOOL_SCHEMA,
    )
    hass.services.async_register(
        DOMAIN,
        SERVICE_SET_RUNTIME_VALUE_INT,
        async_set_runtime_value_int,
        schema=SET_RUNTIME_VALUE_INT_SCHEMA,
    )
    hass.services.async_register(
        DOMAIN,
        SERVICE_SET_RUNTIME_VALUE_FLOAT,
        async_set_runtime_value_float,
        schema=SET_RUNTIME_VALUE_FLOAT_SCHEMA,
    )
    hass.services.async_register(
        DOMAIN, SERVICE_PULSE, async_pulse_runtime_input, schema=PULSE_SCHEMA
    )
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the IHC switch platform."""
    # This platform is only set up through discovery by the IHC component.
    if discovery_info is None:
        return
    devices = []
    for name, device in discovery_info.items():
        ihc_id = device["ihc_id"]
        product_cfg = device["product_cfg"]
        product = device["product"]
        # Find controller that corresponds with device id
        controller_id = device["ctrl_id"]
        ihc_controller: IHCController = hass.data[DOMAIN][controller_id][IHC_CONTROLLER]
        # Optional separate resources for turning the switch off/on.
        ihc_off_id = product_cfg.get(CONF_OFF_ID)
        ihc_on_id = product_cfg.get(CONF_ON_ID)
        switch = IHCSwitch(
            ihc_controller, controller_id, name, ihc_id, ihc_off_id, ihc_on_id, product
        )
        devices.append(switch)
    add_entities(devices)
def async_set_bool(
    hass: HomeAssistant, ihc_controller: IHCController, ihc_id: int, value: bool
) -> asyncio.Future[bool]:
    """Set a bool value on an IHC controller resource.

    The blocking controller call runs in the executor; the returned future
    resolves to the controller call's result.
    """
    return hass.async_add_executor_job(
        ihc_controller.set_runtime_value_bool, ihc_id, value
    )
def async_set_int(
    hass: HomeAssistant, ihc_controller: IHCController, ihc_id: int, value: int
) -> asyncio.Future[bool]:
    """Set a int value on an IHC controller resource.

    The blocking controller call runs in the executor; the returned future
    resolves to the controller call's result.
    """
    return hass.async_add_executor_job(
        ihc_controller.set_runtime_value_int, ihc_id, value
    )
def async_set_float(
    hass: HomeAssistant, ihc_controller: IHCController, ihc_id: int, value: float
) -> asyncio.Future[bool]:
    """Set a float value on an IHC controller resource.

    The blocking controller call runs in the executor; the returned future
    resolves to the controller call's result.
    """
    return hass.async_add_executor_job(
        ihc_controller.set_runtime_value_float, ihc_id, value
    )
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the IHC integration."""
    # Set up each configured controller in order; all() short-circuits on
    # the first failure, matching an early False return.
    return all(
        ihc_setup(hass, config, controller_conf, index)
        for index, controller_conf in enumerate(config[DOMAIN])
    )
def ihc_setup(
    hass: HomeAssistant,
    config: ConfigType,
    controller_conf: ConfigType,
    controller_index: int,
) -> bool:
    """Set up the IHC integration for a single controller."""
    url = controller_conf[CONF_URL]
    username = controller_conf[CONF_USERNAME]
    password = controller_conf[CONF_PASSWORD]
    ihc_controller = IHCController(url, username, password)
    if not ihc_controller.authenticate():
        _LOGGER.error("Unable to authenticate on IHC controller")
        return False
    # The controller's serial number serves as a stable controller id.
    controller_id: str = ihc_controller.client.get_system_info()["serial_number"]
    # Store controller configuration
    hass.data.setdefault(DOMAIN, {})
    hass.data[DOMAIN][controller_id] = {
        IHC_CONTROLLER: ihc_controller,
        CONF_INFO: controller_conf[CONF_INFO],
        IHC_CONTROLLER_INDEX: controller_index,
    }
    if controller_conf[CONF_AUTOSETUP] and not autosetup_ihc_products(
        hass, config, ihc_controller, controller_id
    ):
        return False
    get_manual_configuration(hass, config, controller_conf, controller_id)
    # We only want to register the service functions once for the first controller
    if controller_index == 0:
        setup_service_functions(hass)
    return True
Validate the assigned content type is one of an image. | def valid_image_content_type(content_type: str | None) -> str:
"""Validate the assigned content type is one of an image."""
if content_type is None or content_type.split("/", 1)[0] != "image":
raise ImageContentTypeError
return content_type |
def _generate_thumbnail(
    original_path: pathlib.Path,
    content_type: str,
    target_path: pathlib.Path,
    target_size: tuple[int, int],
) -> None:
    """Generate a thumbnail of the image at original_path into target_path."""
    # Apply any EXIF orientation before resizing.
    image = ImageOps.exif_transpose(Image.open(original_path))
    image.thumbnail(target_size)
    # The PIL format name is the content-type subtype, e.g. "image/png" -> "png".
    image.save(target_path, format=content_type.partition("/")[-1])
def _validate_size_from_filename(filename: str) -> tuple[int, int]:
    """Parse image size from the given filename (of the form WIDTHxHEIGHT-filename).

    >>> _validate_size_from_filename("100x100-image.png")
    (100, 100)
    >>> _validate_size_from_filename("jeff.png")
    Traceback (most recent call last):
    ...
    """
    size_part, _, _ = filename.partition("-")
    if not size_part:
        raise ValueError("Invalid filename")
    width_text, _, height_text = size_part.partition("x")
    width = int(width_text)
    height = int(height_text)
    # Only non-zero square sizes from the allowed set are accepted.
    if not width or width != height or width not in VALID_SIZES:
        raise ValueError(f"Invalid size {size_part}")
    return (width, height)
def _async_get_diagnostics(
    hass: HomeAssistant,
    entry: ConfigEntry,
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    coordinator: ImapDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
    # Sensitive fields are redacted from the stored config before export.
    return {
        "config": async_redact_data(entry.data, REDACT_CONFIG),
        "event": coordinator.diagnostics_data,
    }
def raise_on_error(response: Response, translation_key: str) -> None:
    """Raise a translated error when the IMAP response is not OK."""
    if response.result == "OK":
        return
    # The first response line carries the server's error text.
    error: str = response.lines[0].decode("utf-8")
    raise ServiceValidationError(
        translation_domain=DOMAIN,
        translation_key=translation_key,
        translation_placeholders={"error": error},
    )
def _merge_connection_config_into_query(conf, query):
    """Merge connection details into each configured query."""
    # Copy every connection-level option the query does not already set,
    # skipping the query lists themselves.
    for key, value in conf.items():
        if key in (CONF_QUERIES, CONF_QUERIES_FLUX):
            continue
        query.setdefault(key, value)
def validate_query_format_for_version(conf: dict) -> dict:
    """Ensure queries are provided in correct format based on API version."""
    if conf[CONF_API_VERSION] == API_VERSION_2:
        if CONF_QUERIES_FLUX not in conf:
            raise vol.Invalid(
                f"{CONF_QUERIES_FLUX} is required when {CONF_API_VERSION} is"
                f" {API_VERSION_2}"
            )
        for query in conf[CONF_QUERIES_FLUX]:
            # Copy connection-level options into each query and tag it Flux.
            _merge_connection_config_into_query(conf, query)
            query[CONF_LANGUAGE] = LANGUAGE_FLUX
        # The bucket was merged into each query above; drop the top-level key.
        # NOTE(review): assumes the schema always supplies CONF_BUCKET — confirm.
        del conf[CONF_BUCKET]
    else:
        if CONF_QUERIES not in conf:
            raise vol.Invalid(
                f"{CONF_QUERIES} is required when {CONF_API_VERSION} is"
                f" {DEFAULT_API_VERSION}"
            )
        for query in conf[CONF_QUERIES]:
            # Copy connection-level options into each query and tag it InfluxQL.
            _merge_connection_config_into_query(conf, query)
            query[CONF_LANGUAGE] = LANGUAGE_INFLUXQL
        # The database name was merged into each query above.
        # NOTE(review): assumes the schema always supplies CONF_DB_NAME — confirm.
        del conf[CONF_DB_NAME]
    return conf
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the InfluxDB component."""
    try:
        influx = get_influx_connection(config, test_read=True)
    except ConnectionError as exc:
        _LOGGER.error(exc)
        # Tell Home Assistant to retry the platform setup later.
        raise PlatformNotReady from exc
    entities = []
    if CONF_QUERIES_FLUX in config:
        # API v2: each query targets a bucket.
        for query in config[CONF_QUERIES_FLUX]:
            if query[CONF_BUCKET] in influx.data_repositories:
                entities.append(InfluxSensor(hass, influx, query))
            else:
                _LOGGER.error(NO_BUCKET_ERROR, query[CONF_BUCKET])
    else:
        # API v1: each query targets a database.
        for query in config[CONF_QUERIES]:
            if query[CONF_DB_NAME] in influx.data_repositories:
                entities.append(InfluxSensor(hass, influx, query))
            else:
                _LOGGER.error(NO_DATABASE_ERROR, query[CONF_DB_NAME])
    add_entities(entities, update_before_add=True)
    # Close the influx client when Home Assistant stops.
    hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, lambda _: influx.close())
def create_influx_url(conf: dict) -> dict:
    """Build URL used from config inputs and default when necessary."""
    # Only API v2 uses a URL; v1 connects with host/port directly.
    if conf[CONF_API_VERSION] == API_VERSION_2:
        conf.setdefault(CONF_SSL, DEFAULT_SSL_V2)
        conf.setdefault(CONF_HOST, DEFAULT_HOST_V2)
        scheme = "https" if conf[CONF_SSL] else "http"
        url = f"{scheme}://{conf[CONF_HOST]}"
        if CONF_PORT in conf:
            url = f"{url}:{conf[CONF_PORT]}"
        if CONF_PATH in conf:
            url = f"{url}{conf[CONF_PATH]}"
        conf[CONF_URL] = url
    return conf
def validate_version_specific_config(conf: dict) -> dict:
    """Ensure correct config fields are provided based on API version used."""
    is_v2 = conf[CONF_API_VERSION] == API_VERSION_2
    # V2 requires a token; V1 must not have one.
    if is_v2 and CONF_TOKEN not in conf:
        raise vol.Invalid(
            f"{CONF_TOKEN} and {CONF_BUCKET} are required when"
            f" {CONF_API_VERSION} is {API_VERSION_2}"
        )
    # Username/password auth only applies to V1.
    if is_v2 and CONF_USERNAME in conf:
        raise vol.Invalid(
            f"{CONF_USERNAME} and {CONF_PASSWORD} are only allowed when"
            f" {CONF_API_VERSION} is {DEFAULT_API_VERSION}"
        )
    if not is_v2 and CONF_TOKEN in conf:
        raise vol.Invalid(
            f"{CONF_TOKEN} and {CONF_BUCKET} are only allowed when"
            f" {CONF_API_VERSION} is {API_VERSION_2}"
        )
    return conf
def _generate_event_to_json(conf: dict) -> Callable[[Event], dict[str, Any] | None]:
    """Build event to json converter and add to config.

    Returns a closure that converts a state-changed event into the dict
    format InfluxDB expects, or None when the event should be skipped.
    """
    entity_filter = convert_include_exclude_filter(conf)
    tags = conf.get(CONF_TAGS)
    tags_attributes: list[str] = conf[CONF_TAGS_ATTRIBUTES]
    default_measurement = conf.get(CONF_DEFAULT_MEASUREMENT)
    measurement_attr: str = conf[CONF_MEASUREMENT_ATTR]
    override_measurement = conf.get(CONF_OVERRIDE_MEASUREMENT)
    global_ignore_attributes = set(conf[CONF_IGNORE_ATTRIBUTES])
    # Per-entity overrides, resolvable by exact id, domain or glob.
    component_config = EntityValues(
        conf[CONF_COMPONENT_CONFIG],
        conf[CONF_COMPONENT_CONFIG_DOMAIN],
        conf[CONF_COMPONENT_CONFIG_GLOB],
    )

    def event_to_json(event: Event) -> dict[str, Any] | None:
        """Convert event into json in format Influx expects."""
        state: State | None = event.data.get(EVENT_NEW_STATE)
        # Skip removed, unknown/unavailable or filtered-out entities.
        if (
            state is None
            or state.state in (STATE_UNKNOWN, "", STATE_UNAVAILABLE, None)
            or not entity_filter(state.entity_id)
        ):
            return None
        try:
            # Try the raw state as a float first; fall back to Home
            # Assistant's state-to-number conversion, then to string-only.
            _include_state = _include_value = False
            _state_as_value = float(state.state)
            _include_value = True
        except ValueError:
            try:
                _state_as_value = float(state_helper.state_as_number(state))
                _include_state = _include_value = True
            except ValueError:
                _include_state = True
        include_uom = True
        include_dc = True
        entity_config = component_config.get(state.entity_id)
        measurement = entity_config.get(CONF_OVERRIDE_MEASUREMENT)
        # Measurement precedence: per-entity override, global override,
        # then the configured measurement attribute.
        if measurement in (None, ""):
            if override_measurement:
                measurement = override_measurement
            else:
                if measurement_attr == "entity_id":
                    measurement = state.entity_id
                elif measurement_attr == "domain__device_class":
                    device_class = state.attributes.get("device_class")
                    if device_class is None:
                        # This entity doesn't have a device_class set, use only domain
                        measurement = state.domain
                    else:
                        measurement = f"{state.domain}__{device_class}"
                        # device_class is already encoded in the measurement.
                        include_dc = False
                else:
                    measurement = state.attributes.get(measurement_attr)
                    if measurement in (None, ""):
                        if default_measurement:
                            measurement = default_measurement
                        else:
                            measurement = state.entity_id
                    else:
                        # The measurement attribute value is already used as
                        # the measurement name; don't duplicate it as a field.
                        include_uom = measurement_attr != "unit_of_measurement"
        json: dict[str, Any] = {
            INFLUX_CONF_MEASUREMENT: measurement,
            INFLUX_CONF_TAGS: {
                CONF_DOMAIN: state.domain,
                CONF_ENTITY_ID: state.object_id,
            },
            INFLUX_CONF_TIME: event.time_fired,
            INFLUX_CONF_FIELDS: {},
        }
        if _include_state:
            json[INFLUX_CONF_FIELDS][INFLUX_CONF_STATE] = state.state
        if _include_value:
            json[INFLUX_CONF_FIELDS][INFLUX_CONF_VALUE] = _state_as_value
        # Attributes ignored for this entity plus the globally ignored ones.
        ignore_attributes = set(entity_config.get(CONF_IGNORE_ATTRIBUTES, []))
        ignore_attributes.update(global_ignore_attributes)
        for key, value in state.attributes.items():
            if key in tags_attributes:
                json[INFLUX_CONF_TAGS][key] = value
            elif (
                (key != CONF_UNIT_OF_MEASUREMENT or include_uom)
                and (key != "device_class" or include_dc)
                and key not in ignore_attributes
            ):
                # If the key is already in fields
                if key in json[INFLUX_CONF_FIELDS]:
                    key = f"{key}_"
                # Prevent column data errors in influxDB.
                # For each value we try to cast it as float
                # But if we cannot do it we store the value
                # as string add "_str" postfix to the field key
                try:
                    json[INFLUX_CONF_FIELDS][key] = float(value)
                except (ValueError, TypeError):
                    new_key = f"{key}_str"
                    new_value = str(value)
                    json[INFLUX_CONF_FIELDS][new_key] = new_value
                    # Also store a numeric variant when the string looks
                    # like a number with a unit suffix.
                    if RE_DIGIT_TAIL.match(new_value):
                        json[INFLUX_CONF_FIELDS][key] = float(
                            RE_DECIMAL.sub("", new_value)
                        )
                # Infinity and NaN are not valid floats in InfluxDB
                with suppress(KeyError, TypeError):
                    if not math.isfinite(json[INFLUX_CONF_FIELDS][key]):
                        del json[INFLUX_CONF_FIELDS][key]
        # Static tags from the configuration take precedence.
        # NOTE(review): assumes the schema defaults CONF_TAGS to a dict — confirm.
        json[INFLUX_CONF_TAGS].update(tags)
        return json

    return event_to_json
def get_influx_connection(  # noqa: C901
    conf, test_write=False, test_read=False
) -> InfluxClient:
    """Create the correct influx connection for the API version.

    Returns an InfluxClient facade wrapping either a V1 or V2 client with
    uniform write/query/close callables. When test_write/test_read are set,
    the connection is exercised up front and a ConnectionError is raised on
    failure.
    """
    kwargs = {
        CONF_TIMEOUT: TIMEOUT,
    }
    precision = conf.get(CONF_PRECISION)
    if conf[CONF_API_VERSION] == API_VERSION_2:
        # The V2 client expects the timeout in milliseconds.
        kwargs[CONF_TIMEOUT] = TIMEOUT * 1000
        kwargs[CONF_URL] = conf[CONF_URL]
        kwargs[CONF_TOKEN] = conf[CONF_TOKEN]
        kwargs[INFLUX_CONF_ORG] = conf[CONF_ORG]
        kwargs[CONF_VERIFY_SSL] = conf[CONF_VERIFY_SSL]
        if CONF_SSL_CA_CERT in conf:
            kwargs[CONF_SSL_CA_CERT] = conf[CONF_SSL_CA_CERT]
        bucket = conf.get(CONF_BUCKET)
        influx = InfluxDBClientV2(**kwargs)
        query_api = influx.query_api()
        # Use synchronous writes during the connection test so failures
        # surface immediately; switch back to async afterwards.
        initial_write_mode = SYNCHRONOUS if test_write else ASYNCHRONOUS
        write_api = influx.write_api(write_options=initial_write_mode)

        def write_v2(json):
            """Write data to V2 influx."""
            data = {"bucket": bucket, "record": json}
            if precision is not None:
                data["write_precision"] = precision
            try:
                write_api.write(**data)
            except (urllib3.exceptions.HTTPError, OSError) as exc:
                raise ConnectionError(CONNECTION_ERROR % exc) from exc
            except ApiException as exc:
                # Invalid-input errors are data problems, not connection ones.
                if exc.status == CODE_INVALID_INPUTS:
                    raise ValueError(WRITE_ERROR % (json, exc)) from exc
                raise ConnectionError(CLIENT_ERROR_V2 % exc) from exc

        def query_v2(query, _=None):
            """Query V2 influx."""
            try:
                return query_api.query(query)
            except (urllib3.exceptions.HTTPError, OSError) as exc:
                raise ConnectionError(CONNECTION_ERROR % exc) from exc
            except ApiException as exc:
                if exc.status == CODE_INVALID_INPUTS:
                    raise ValueError(QUERY_ERROR % (query, exc)) from exc
                raise ConnectionError(CLIENT_ERROR_V2 % exc) from exc

        def close_v2():
            """Close V2 influx client."""
            influx.close()

        buckets = []
        if test_write:
            # Try to write b"" to influx. If we can connect and creds are valid
            # Then invalid inputs is returned. Anything else is a broken config
            with suppress(ValueError):
                write_v2(b"")
            write_api = influx.write_api(write_options=ASYNCHRONOUS)
        if test_read:
            tables = query_v2(TEST_QUERY_V2)
            if tables and tables[0].records:
                buckets = [bucket.values["name"] for bucket in tables[0].records]
            else:
                buckets = []
        return InfluxClient(buckets, write_v2, query_v2, close_v2)

    # Else it's a V1 client
    # The V1 client accepts a CA cert path as the verify_ssl value.
    if CONF_SSL_CA_CERT in conf and conf[CONF_VERIFY_SSL]:
        kwargs[CONF_VERIFY_SSL] = conf[CONF_SSL_CA_CERT]
    else:
        kwargs[CONF_VERIFY_SSL] = conf[CONF_VERIFY_SSL]
    if CONF_DB_NAME in conf:
        kwargs[CONF_DB_NAME] = conf[CONF_DB_NAME]
    if CONF_USERNAME in conf:
        kwargs[CONF_USERNAME] = conf[CONF_USERNAME]
    if CONF_PASSWORD in conf:
        kwargs[CONF_PASSWORD] = conf[CONF_PASSWORD]
    if CONF_HOST in conf:
        kwargs[CONF_HOST] = conf[CONF_HOST]
    if CONF_PATH in conf:
        kwargs[CONF_PATH] = conf[CONF_PATH]
    if CONF_PORT in conf:
        kwargs[CONF_PORT] = conf[CONF_PORT]
    if CONF_SSL in conf:
        kwargs[CONF_SSL] = conf[CONF_SSL]
    influx = InfluxDBClient(**kwargs)

    def write_v1(json):
        """Write data to V1 influx."""
        try:
            influx.write_points(json, time_precision=precision)
        except (
            requests.exceptions.RequestException,
            exceptions.InfluxDBServerError,
            OSError,
        ) as exc:
            raise ConnectionError(CONNECTION_ERROR % exc) from exc
        except exceptions.InfluxDBClientError as exc:
            # Invalid-input errors are data problems, not connection ones.
            if exc.code == CODE_INVALID_INPUTS:
                raise ValueError(WRITE_ERROR % (json, exc)) from exc
            raise ConnectionError(CLIENT_ERROR_V1 % exc) from exc

    def query_v1(query, database=None):
        """Query V1 influx."""
        try:
            return list(influx.query(query, database=database).get_points())
        except (
            requests.exceptions.RequestException,
            exceptions.InfluxDBServerError,
            OSError,
        ) as exc:
            raise ConnectionError(CONNECTION_ERROR % exc) from exc
        except exceptions.InfluxDBClientError as exc:
            if exc.code == CODE_INVALID_INPUTS:
                raise ValueError(QUERY_ERROR % (query, exc)) from exc
            raise ConnectionError(CLIENT_ERROR_V1 % exc) from exc

    def close_v1():
        """Close the V1 Influx client."""
        influx.close()

    databases = []
    if test_write:
        # An empty write exercises the connection without storing data.
        write_v1([])
    if test_read:
        databases = [db["name"] for db in query_v1(TEST_QUERY_V1)]
    return InfluxClient(databases, write_v1, query_v1, close_v1)
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the InfluxDB component."""
    conf = config[DOMAIN]
    try:
        influx = get_influx_connection(conf, test_write=True)
    except ConnectionError as exc:
        _LOGGER.error(RETRY_MESSAGE, exc)
        # Connection failed; schedule a retry of the whole setup instead of
        # failing the component permanently.
        event_helper.call_later(
            hass, RETRY_INTERVAL, lambda _: _retry_setup(hass, config)
        )
        return True
    event_to_json = _generate_event_to_json(conf)
    max_tries = conf.get(CONF_RETRY_COUNT)
    instance = hass.data[DOMAIN] = InfluxThread(hass, influx, event_to_json, max_tries)
    instance.start()

    def shutdown(event):
        """Shut down the thread."""
        # A None sentinel tells the worker thread to exit.
        instance.queue.put(None)
        instance.join()
        influx.close()

    hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, shutdown)
    return True
def _device_key_to_bluetooth_entity_key(
    device_key: DeviceKey,
) -> PassiveBluetoothEntityKey:
    """Convert a device key to an entity key."""
    # Both types carry the same (key, device_id) pair; this is a plain
    # field-for-field translation.
    return PassiveBluetoothEntityKey(
        key=device_key.key, device_id=device_key.device_id
    )
def sensor_update_to_bluetooth_data_update(
    sensor_update: SensorUpdate,
) -> PassiveBluetoothDataUpdate:
    """Convert a sensor update to a bluetooth data update."""
    device_infos = {}
    for device_id, device_info in sensor_update.devices.items():
        device_infos[device_id] = sensor_device_info_to_hass_device_info(device_info)

    descriptions = {}
    for device_key, description in sensor_update.entity_descriptions.items():
        # Only sensors that report both a device class and a native unit
        # have a matching Home Assistant entity description.
        if description.device_class and description.native_unit_of_measurement:
            lookup = (description.device_class, description.native_unit_of_measurement)
            descriptions[
                _device_key_to_bluetooth_entity_key(device_key)
            ] = SENSOR_DESCRIPTIONS[lookup]

    values = {}
    names = {}
    for device_key, sensor_values in sensor_update.entity_values.items():
        entity_key = _device_key_to_bluetooth_entity_key(device_key)
        values[entity_key] = sensor_values.native_value
        names[entity_key] = sensor_values.name

    return PassiveBluetoothDataUpdate(
        devices=device_infos,
        entity_descriptions=descriptions,
        entity_data=values,
        entity_names=names,
    )
def is_on(hass: HomeAssistant, entity_id: str) -> bool:
    """Test if input_boolean is True."""
    # Delegate to the state machine: True only when the entity state is "on".
    state_machine = hass.states
    return state_machine.is_state(entity_id, STATE_ON)
def is_valid_datetime(string: str) -> bool:
    """Test if string dt is a valid datetime."""
    try:
        parsed = dt_util.parse_datetime(string)
    except ValueError:
        # The parser raises for some malformed inputs rather than
        # returning None.
        return False
    return parsed is not None
def is_valid_date(string: str) -> bool:
    """Test if string dt is a valid date."""
    parsed = dt_util.parse_date(string)
    return parsed is not None
def is_valid_time(string: str) -> bool:
    """Test if string dt is a valid time."""
    parsed = dt_util.parse_time(string)
    return parsed is not None
def validate_set_datetime_attrs(config):
    """Validate set_datetime service attributes."""
    # The date/time pair, the datetime and the timestamp are mutually
    # exclusive ways of expressing the same value.
    forms_used = [
        ATTR_DATE in config or ATTR_TIME in config,
        ATTR_DATETIME in config,
        ATTR_TIMESTAMP in config,
    ]
    if sum(forms_used) > 1:
        raise vol.Invalid(f"Cannot use together: {', '.join(config.keys())}")
    return config
def has_date_or_time(conf):
    """Check at least date or time is true."""
    # Guard clause: an input_datetime without either component is useless.
    if not (conf[CONF_HAS_DATE] or conf[CONF_HAS_TIME]):
        raise vol.Invalid("Entity needs at least a date or a time")
    return conf
def valid_initial(conf: dict[str, Any]) -> dict[str, Any]:
    """Check the initial value is valid."""
    if conf.get(CONF_INITIAL):
        # parse_initial_datetime raises vol.Invalid when the configured
        # initial value cannot be parsed.
        parse_initial_datetime(conf)
    return conf
def parse_initial_datetime(conf: dict[str, Any]) -> py_datetime.datetime:
    """Parse the configured initial value into a datetime.

    Raises vol.Invalid when the string cannot be parsed with the
    configured date/time components.
    """
    initial: str = conf[CONF_INITIAL]
    wants_date = conf[CONF_HAS_DATE]
    wants_time = conf[CONF_HAS_TIME]

    if wants_date and wants_time:
        parsed = dt_util.parse_datetime(initial)
        if parsed is None:
            raise vol.Invalid(
                f"Initial value '{initial}' can't be parsed as a datetime"
            )
        return parsed

    if wants_date:
        parsed_date = dt_util.parse_date(initial)
        if parsed_date is None:
            raise vol.Invalid(f"Initial value '{initial}' can't be parsed as a date")
        # Date-only entities are anchored to the default time.
        return py_datetime.datetime.combine(parsed_date, DEFAULT_TIME)

    parsed_time = dt_util.parse_time(initial)
    if parsed_time is None:
        raise vol.Invalid(f"Initial value '{initial}' can't be parsed as a time")
    # Time-only entities are anchored to today's date.
    return py_datetime.datetime.combine(py_datetime.date.today(), parsed_time)
def _cv_input_number(cfg):
    """Configure validation helper for input number (voluptuous).

    Ensures minimum < maximum and that any initial value lies within
    that range; raises vol.Invalid otherwise.
    """
    minimum = cfg.get(CONF_MIN)
    maximum = cfg.get(CONF_MAX)
    if minimum >= maximum:
        # Fix: the original message reported the values under swapped
        # labels (minimum where maximum belongs and vice versa).
        raise vol.Invalid(
            f"Maximum ({maximum}) is not greater than minimum ({minimum})"
        )
    state = cfg.get(CONF_INITIAL)
    if state is not None and (state < minimum or state > maximum):
        raise vol.Invalid(f"Initial value {state} not in range {minimum}-{maximum}")
    return cfg
def check_attr_equal(attr1: Mapping, attr2: Mapping, attr_str: str) -> bool:
    """Return true if the given attributes are equal.

    A key missing from both mappings compares equal (None == None).
    """
    first = attr1.get(attr_str)
    second = attr2.get(attr_str)
    return first == second
Remove duplicated options. | def _remove_duplicates(options: list[str], name: str | None) -> list[str]:
"""Remove duplicated options."""
unique_options = list(dict.fromkeys(options))
# This check was added in 2022.3
# Reject YAML configured input_select with duplicates from 2022.6
if len(unique_options) != len(options):
_LOGGER.warning(
(
"Input select '%s' with options %s had duplicated options, the"
" duplicates have been removed"
),
name or "<unnamed>",
options,
)
return unique_options |
def _cv_input_select(cfg: dict[str, Any]) -> dict[str, Any]:
    """Configure validation helper for input select (voluptuous)."""
    options = cfg[CONF_OPTIONS]
    # The initial state must be one of the configured options.
    if (initial := cfg.get(CONF_INITIAL)) is not None and initial not in options:
        raise vol.Invalid(
            f"initial state {initial} is not part of the options: {','.join(options)}"
        )
    cfg[CONF_OPTIONS] = _remove_duplicates(options, cfg.get(CONF_NAME))
    return cfg
def _cv_input_text(config: dict[str, Any]) -> dict[str, Any]:
    """Configure validation helper for input box (voluptuous).

    Ensures min length <= max length and that any initial value's length
    lies within that range; raises vol.Invalid otherwise.
    """
    minimum: int = config[CONF_MIN]
    maximum: int = config[CONF_MAX]
    if minimum > maximum:
        # Fix: the original message reported the values under swapped
        # labels (min where max belongs and vice versa).
        raise vol.Invalid(
            f"Max len ({maximum}) is not greater than min len ({minimum})"
        )
    state: str | None = config.get(CONF_INITIAL)
    if state is not None and (len(state) < minimum or len(state) > maximum):
        raise vol.Invalid(
            f"Initial value {state} length not in range {minimum}-{maximum}"
        )
    return config
def get_device_platforms(device) -> dict[Platform, Iterable[int]]:
    """Return the HA platforms for a device type."""
    # Unknown device types map to an empty platform dict.
    device_type = type(device)
    return DEVICE_PLATFORM.get(device_type, {})
def get_device_platform_groups(device: Device, platform: Platform) -> Iterable[int]:
    """Return the list of device groups for a platform."""
    platform_map = get_device_platforms(device)
    return platform_map.get(platform, [])
def build_device_override_schema(
    address=vol.UNDEFINED,
    cat=vol.UNDEFINED,
    subcat=vol.UNDEFINED,
    firmware=vol.UNDEFINED,
):
    """Build the device override schema for config flow."""
    # NOTE(review): the firmware argument is accepted but not part of the
    # schema — confirm whether that is intentional.
    fields = {
        vol.Required(CONF_ADDRESS, default=address): str,
        vol.Optional(CONF_CAT, default=cat): str,
        vol.Optional(CONF_SUBCAT, default=subcat): str,
    }
    return vol.Schema(fields)
def build_x10_schema(
    housecode=vol.UNDEFINED,
    unitcode=vol.UNDEFINED,
    platform=vol.UNDEFINED,
    dim_steps=22,
):
    """Build the X10 schema for config flow."""
    # Dim steps only matter for lights, so the field is required there
    # and optional for every other platform.
    field_cls = vol.Required if platform == "light" else vol.Optional
    dim_steps_field = field_cls(CONF_DIM_STEPS, default=dim_steps)
    return vol.Schema(
        {
            vol.Required(CONF_HOUSECODE, default=housecode): vol.In(HC_LOOKUP.keys()),
            vol.Required(CONF_UNITCODE, default=unitcode): vol.In(range(1, 17)),
            vol.Required(CONF_PLATFORM, default=platform): vol.In(X10_PLATFORMS),
            dim_steps_field: vol.Range(min=0, max=255),
        }
    )
Return the most likely USB port for a PLM. | def _find_likely_port(ports):
"""Return the most likely USB port for a PLM."""
test_strings = ["FTDI", "0403:6001", "10BF:"]
for port, name in ports.items():
for test_string in test_strings:
if test_string in name:
return port
return vol.UNDEFINED |
def build_plm_schema(ports: dict[str, str], device=vol.UNDEFINED):
    """Build the PLM schema for config flow."""
    selected = device
    if not selected or selected == vol.UNDEFINED:
        # No device chosen yet: guess the most likely serial port.
        selected = _find_likely_port(ports)
    return vol.Schema({vol.Required(CONF_DEVICE, default=selected): vol.In(ports)})
def build_plm_manual_schema(device=vol.UNDEFINED):
    """Build the manual PLM schema for config flow."""
    fields = {vol.Required(CONF_DEVICE, default=device): str}
    return vol.Schema(fields)
def build_hub_schema(
    hub_version,
    host=vol.UNDEFINED,
    port=vol.UNDEFINED,
    username=vol.UNDEFINED,
    password=vol.UNDEFINED,
):
    """Build the Hub schema for config flow."""
    is_hub_v2 = hub_version == 2
    if port == vol.UNDEFINED:
        # Each hub generation listens on a different default port.
        port = PORT_HUB_V2 if is_hub_v2 else PORT_HUB_V1
    fields = {
        vol.Required(CONF_HOST, default=host): str,
        vol.Required(CONF_PORT, default=port): int,
    }
    # Only the version 2 hub requires credentials.
    if is_hub_v2:
        fields[vol.Required(CONF_USERNAME, default=username)] = str
        fields[vol.Required(CONF_PASSWORD, default=password)] = str
    return vol.Schema(fields)
def _register_event(event: Event, listener: Callable) -> None:
    """Register the events raised by a device."""
    address_repr = str(event.address)
    _LOGGER.debug(
        "Registering on/off event for %s %d %s", address_repr, event.group, event.name
    )
    # A strong reference keeps the listener alive for the lifetime of the
    # subscription.
    event.subscribe(listener, force_strong_ref=True)
def add_insteon_events(hass: HomeAssistant, device: Device) -> None:
    """Register Insteon device events.

    Subscribes a listener to every on/off event the device exposes so
    each one is re-fired on the Home Assistant event bus.
    """

    @callback
    def async_fire_insteon_event(
        name: str, address: Address, group: int, button: str | None = None
    ):
        # Firing an event when a button is pressed.
        # Button identifiers look like "button_a": the trailing letter is
        # the button id. NOTE(review): assumes len(button) >= 2 when set.
        if button and button[-2] == "_":
            button_id = button[-1].lower()
        else:
            button_id = None

        schema = {CONF_ADDRESS: address, "group": group}
        if button_id:
            schema[EVENT_CONF_BUTTON] = button_id
        # Map known pyinsteon event names onto the Home Assistant event
        # types; anything else is fired under a generic "insteon." name.
        if name == ON_EVENT:
            event = EVENT_GROUP_ON
        elif name == OFF_EVENT:
            event = EVENT_GROUP_OFF
        elif name == ON_FAST_EVENT:
            event = EVENT_GROUP_ON_FAST
        elif name == OFF_FAST_EVENT:
            event = EVENT_GROUP_OFF_FAST
        else:
            event = f"insteon.{name}"
        _LOGGER.debug("Firing event %s with %s", event, schema)
        hass.bus.async_fire(event, schema)

    # X10 devices do not raise Insteon events.
    if str(device.address).startswith("X10"):
        return

    for name_or_group, event in device.events.items():
        if isinstance(name_or_group, int):
            # Integer keys hold a nested mapping of events for a button
            # group; register each one.
            for event in device.events[name_or_group].values():
                _register_event(event, async_fire_insteon_event)
        else:
            _register_event(event, async_fire_insteon_event)
def register_new_device_callback(hass):
    """Register callback for new Insteon device."""

    @callback
    def async_new_insteon_device(address, action: DeviceAction):
        """Detect device from transport to be delegated to platform."""
        if action == DeviceAction.ADDED:
            hass.async_create_task(async_create_new_entities(address))

    async def async_create_new_entities(address):
        """Persist the new device and dispatch its entities to platforms."""
        _LOGGER.debug(
            "Adding new INSTEON device to Home Assistant with address %s", address
        )
        # Save first so the device survives a restart even if setup below
        # fails part-way.
        await devices.async_save(workdir=hass.config.config_dir)
        device = devices[address]
        await device.async_status()
        platforms = get_device_platforms(device)
        for platform in platforms:
            groups = get_device_platform_groups(device, platform)
            signal = f"{SIGNAL_ADD_ENTITIES}_{platform}"
            dispatcher_send(hass, signal, {"address": device.address, "groups": groups})
        add_insteon_events(hass, device)

    # Strong reference keeps the callback alive for the subscription.
    devices.subscribe(async_new_insteon_device, force_strong_ref=True)
def async_register_services(hass):  # noqa: C901
    """Register services used by insteon component.

    Registers all Insteon service handlers and connects the dispatcher
    signals used internally by the integration.
    """
    # Serializes concurrent saves of the device configuration file.
    save_lock = asyncio.Lock()

    async def async_srv_add_all_link(service: ServiceCall) -> None:
        """Add an INSTEON All-Link between two devices."""
        group = service.data[SRV_ALL_LINK_GROUP]
        mode = service.data[SRV_ALL_LINK_MODE]
        # Controller mode links as controller; anything else as responder.
        link_mode = mode.lower() == SRV_CONTROLLER
        await async_enter_linking_mode(link_mode, group)

    async def async_srv_del_all_link(service: ServiceCall) -> None:
        """Delete an INSTEON All-Link between two devices."""
        group = service.data.get(SRV_ALL_LINK_GROUP)
        await async_enter_unlinking_mode(group)

    async def async_srv_load_aldb(service: ServiceCall) -> None:
        """Load the device All-Link database."""
        entity_id = service.data[CONF_ENTITY_ID]
        reload = service.data[SRV_LOAD_DB_RELOAD]
        if entity_id.lower() == ENTITY_MATCH_ALL:
            await async_srv_load_aldb_all(reload)
        else:
            signal = f"{entity_id}_{SIGNAL_LOAD_ALDB}"
            async_dispatcher_send(hass, signal, reload)

    async def async_srv_load_aldb_all(reload):
        """Load the All-Link database for all devices."""
        # Cannot be done concurrently due to issues with the underlying protocol.
        for address in devices:
            device = devices[address]
            if device != devices.modem and device.cat != 0x03:
                await device.aldb.async_load(refresh=reload)
                await async_srv_save_devices()

    async def async_srv_save_devices():
        """Write the Insteon device configuration to file."""
        async with save_lock:
            _LOGGER.debug("Saving Insteon devices")
            await devices.async_save(hass.config.config_dir)

    def print_aldb(service: ServiceCall) -> None:
        """Print the All-Link Database for a device."""
        # For now this sends logs to the log file.
        # Future direction is to create an INSTEON control panel.
        entity_id = service.data[CONF_ENTITY_ID]
        signal = f"{entity_id}_{SIGNAL_PRINT_ALDB}"
        dispatcher_send(hass, signal)

    def print_im_aldb(service: ServiceCall) -> None:
        """Print the All-Link Database for the modem."""
        # For now this sends logs to the log file.
        # Future direction is to create an INSTEON control panel.
        print_aldb_to_log(devices.modem.aldb)

    async def async_srv_x10_all_units_off(service: ServiceCall) -> None:
        """Send the X10 All Units Off command."""
        housecode = service.data.get(SRV_HOUSECODE)
        await async_x10_all_units_off(housecode)

    async def async_srv_x10_all_lights_off(service: ServiceCall) -> None:
        """Send the X10 All Lights Off command."""
        housecode = service.data.get(SRV_HOUSECODE)
        await async_x10_all_lights_off(housecode)

    async def async_srv_x10_all_lights_on(service: ServiceCall) -> None:
        """Send the X10 All Lights On command."""
        housecode = service.data.get(SRV_HOUSECODE)
        await async_x10_all_lights_on(housecode)

    async def async_srv_scene_on(service: ServiceCall) -> None:
        """Trigger an INSTEON scene ON."""
        group = service.data.get(SRV_ALL_LINK_GROUP)
        await async_trigger_scene_on(group)

    async def async_srv_scene_off(service: ServiceCall) -> None:
        """Trigger an INSTEON scene OFF."""
        group = service.data.get(SRV_ALL_LINK_GROUP)
        await async_trigger_scene_off(group)

    @callback
    def async_add_default_links(service: ServiceCall) -> None:
        """Add the default All-Link entries to a device."""
        entity_id = service.data[CONF_ENTITY_ID]
        signal = f"{entity_id}_{SIGNAL_ADD_DEFAULT_LINKS}"
        async_dispatcher_send(hass, signal)

    async def async_add_device_override(override):
        """Add a device override and rebuild the associated entities."""
        address = Address(override[CONF_ADDRESS])
        # Remove first so entities are re-created with the new identity.
        await async_remove_ha_device(address)
        devices.set_id(address, override[CONF_CAT], override[CONF_SUBCAT], 0)
        await async_srv_save_devices()

    async def async_remove_device_override(address):
        """Remove an Insteon device override and associated entities."""
        address = Address(address)
        await async_remove_ha_device(address)
        devices.set_id(address, None, None, None)
        await devices.async_identify_device(address)
        await async_srv_save_devices()

    @callback
    def async_add_x10_device(x10_config):
        """Add X10 device."""
        housecode = x10_config[CONF_HOUSECODE]
        unitcode = x10_config[CONF_UNITCODE]
        platform = x10_config[CONF_PLATFORM]
        steps = x10_config.get(CONF_DIM_STEPS, 22)
        x10_type = "on_off"
        if platform == "light":
            x10_type = "dimmable"
        elif platform == "binary_sensor":
            x10_type = "sensor"
        _LOGGER.debug(
            "Adding X10 device to Insteon: %s %d %s", housecode, unitcode, x10_type
        )
        # This must be run in the event loop
        devices.add_x10_device(housecode, unitcode, x10_type, steps)

    async def async_remove_x10_device(housecode, unitcode):
        """Remove an X10 device and associated entities."""
        address = create_x10_address(housecode, unitcode)
        devices.pop(address)
        await async_remove_ha_device(address)

    async def async_remove_ha_device(address: Address, remove_all_refs: bool = False):
        """Remove the device and all entities from hass."""
        signal = f"{address.id}_{SIGNAL_REMOVE_ENTITY}"
        async_dispatcher_send(hass, signal)
        dev_registry = dr.async_get(hass)
        device = dev_registry.async_get_device(identifiers={(DOMAIN, str(address))})
        if device:
            dev_registry.async_remove_device(device.id)

    async def async_remove_insteon_device(
        address: Address, remove_all_refs: bool = False
    ):
        """Remove the underlying Insteon device from the network."""
        await devices.async_remove_device(
            address=address, force=False, remove_all_refs=remove_all_refs
        )
        await async_srv_save_devices()

    # Service registrations.
    hass.services.async_register(
        DOMAIN, SRV_ADD_ALL_LINK, async_srv_add_all_link, schema=ADD_ALL_LINK_SCHEMA
    )
    hass.services.async_register(
        DOMAIN, SRV_DEL_ALL_LINK, async_srv_del_all_link, schema=DEL_ALL_LINK_SCHEMA
    )
    hass.services.async_register(
        DOMAIN, SRV_LOAD_ALDB, async_srv_load_aldb, schema=LOAD_ALDB_SCHEMA
    )
    hass.services.async_register(
        DOMAIN, SRV_PRINT_ALDB, print_aldb, schema=PRINT_ALDB_SCHEMA
    )
    hass.services.async_register(DOMAIN, SRV_PRINT_IM_ALDB, print_im_aldb, schema=None)
    hass.services.async_register(
        DOMAIN,
        SRV_X10_ALL_UNITS_OFF,
        async_srv_x10_all_units_off,
        schema=X10_HOUSECODE_SCHEMA,
    )
    hass.services.async_register(
        DOMAIN,
        SRV_X10_ALL_LIGHTS_OFF,
        async_srv_x10_all_lights_off,
        schema=X10_HOUSECODE_SCHEMA,
    )
    hass.services.async_register(
        DOMAIN,
        SRV_X10_ALL_LIGHTS_ON,
        async_srv_x10_all_lights_on,
        schema=X10_HOUSECODE_SCHEMA,
    )
    hass.services.async_register(
        DOMAIN, SRV_SCENE_ON, async_srv_scene_on, schema=TRIGGER_SCENE_SCHEMA
    )
    hass.services.async_register(
        DOMAIN, SRV_SCENE_OFF, async_srv_scene_off, schema=TRIGGER_SCENE_SCHEMA
    )
    hass.services.async_register(
        DOMAIN,
        SRV_ADD_DEFAULT_LINKS,
        async_add_default_links,
        schema=ADD_DEFAULT_LINKS_SCHEMA,
    )
    # Internal dispatcher signal wiring.
    async_dispatcher_connect(hass, SIGNAL_SAVE_DEVICES, async_srv_save_devices)
    async_dispatcher_connect(
        hass, SIGNAL_ADD_DEVICE_OVERRIDE, async_add_device_override
    )
    async_dispatcher_connect(
        hass, SIGNAL_REMOVE_DEVICE_OVERRIDE, async_remove_device_override
    )
    async_dispatcher_connect(hass, SIGNAL_ADD_X10_DEVICE, async_add_x10_device)
    async_dispatcher_connect(hass, SIGNAL_REMOVE_X10_DEVICE, async_remove_x10_device)
    async_dispatcher_connect(hass, SIGNAL_REMOVE_HA_DEVICE, async_remove_ha_device)
    async_dispatcher_connect(
        hass, SIGNAL_REMOVE_INSTEON_DEVICE, async_remove_insteon_device
    )
    _LOGGER.debug("Insteon Services registered")
def print_aldb_to_log(aldb):
    """Print the All-Link Database to the log file.

    Writes one header plus one line per ALDB record to a dedicated
    ".links" child logger.
    """
    logger = logging.getLogger(f"{__name__}.links")
    logger.info("%s ALDB load status is %s", aldb.address, aldb.status.name)
    if aldb.status not in [ALDBStatus.LOADED, ALDBStatus.PARTIAL]:
        # Still print whatever records are available below.
        _LOGGER.warning("All-Link database not loaded")
    logger.info("RecID In Use Mode HWM Group Address Data 1 Data 2 Data 3")
    logger.info("----- ------ ---- --- ----- -------- ------ ------ ------")
    for mem_addr in aldb:
        rec = aldb[mem_addr]
        # For now we write this to the log
        # Roadmap is to create a configuration panel
        in_use = "Y" if rec.is_in_use else "N"
        mode = "C" if rec.is_controller else "R"
        hwm = "Y" if rec.is_high_water_mark else "N"
        log_msg = (
            f" {rec.mem_addr:04x} {in_use:s} {mode:s} {hwm:s} "
            f"{rec.group:3d} {str(rec.target):s} {rec.data1:3d} "
            f"{rec.data2:3d} {rec.data3:3d}"
        )
        logger.info(log_msg)
def async_add_insteon_entities(
    hass: HomeAssistant,
    platform: Platform,
    entity_type: type[InsteonEntity],
    async_add_entities: AddEntitiesCallback,
    discovery_info: dict[str, Any],
) -> None:
    """Add an Insteon group to a platform."""
    device = devices[discovery_info["address"]]
    entities = []
    # One entity per group listed in the discovery payload.
    for group in discovery_info["groups"]:
        entities.append(entity_type(device=device, group=group))
    async_add_entities(entities)
def async_add_insteon_devices(
    hass: HomeAssistant,
    platform: Platform,
    entity_type: type[InsteonEntity],
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Add all entities to a platform."""
    for address in devices:
        device = devices[address]
        async_add_insteon_entities(
            hass,
            platform,
            entity_type,
            async_add_entities,
            {
                "address": address,
                "groups": get_device_platform_groups(device, platform),
            },
        )
def get_usb_ports() -> dict[str, str]:
    """Return a dict of USB ports and their friendly names."""
    port_descriptions: dict[str, str] = {}
    for port in list_ports.comports():
        vid: str | None = None
        pid: str | None = None
        # Only true USB devices expose vendor/product ids.
        if port.vid is not None and port.pid is not None:
            usb_device = usb.usb_device_from_port(port)
            vid = usb_device.vid
            pid = usb_device.pid
        dev_path = usb.get_serial_by_id(port.device)
        port_descriptions[dev_path] = usb.human_readable_device_name(
            dev_path,
            port.serial_number,
            port.manufacturer,
            port.description,
            vid,
            pid,
        )
    return port_descriptions
def create_insteon_device(hass, device, config_entry_id):
    """Create an Insteon device in the device registry."""
    registry = dr.async_get(hass)
    # Keyed by the Insteon address, so re-registering is idempotent.
    registry.async_get_or_create(
        config_entry_id=config_entry_id,
        identifiers={(DOMAIN, str(device.address))},
        manufacturer="SmartLabs, Inc",
        name=f"{device.description} {device.address}",
        model=f"{device.model} ({device.cat!r}, 0x{device.subcat:02x})",
        sw_version=f"{device.firmware:02x} Engine Version: {device.engine_version}",
    )
def get_insteon_config_entry(hass: HomeAssistant) -> ConfigEntry:
    """Return the Insteon configuration entry.

    Insteon is a single-instance integration, so the first entry is
    the only one.
    """
    entries = hass.config_entries.async_entries(DOMAIN)
    return entries[0]
def add_x10_device(hass: HomeAssistant, x10_device: X10DeviceConfig):
    """Add an X10 device to the Insteon integration.

    Raises ValueError when a device with the same housecode/unitcode is
    already configured.
    """
    config_entry = get_insteon_config_entry(hass)
    existing = config_entry.options.get(CONF_X10, [])
    for device in existing:
        if (
            device[CONF_HOUSECODE] == x10_device["housecode"]
            and device[CONF_UNITCODE] == x10_device["unitcode"]
        ):
            raise ValueError("Duplicate X10 device")
    hass.config_entries.async_update_entry(
        entry=config_entry,
        options=config_entry.options | {CONF_X10: [*existing, x10_device]},
    )
    async_dispatcher_send(hass, SIGNAL_ADD_X10_DEVICE, x10_device)
def remove_x10_device(hass: HomeAssistant, housecode: str, unitcode: int):
    """Remove an X10 device from the config."""
    config_entry = get_insteon_config_entry(hass)
    target_housecode = housecode.lower()
    remaining = []
    for existing_device in config_entry.options.get(CONF_X10, []):
        # Drop the entry whose housecode (case-insensitive) and unitcode
        # both match.
        if (
            existing_device[CONF_HOUSECODE].lower() == target_housecode
            and existing_device[CONF_UNITCODE] == unitcode
        ):
            continue
        remaining.append(existing_device)
    new_options = {**config_entry.options, CONF_X10: remaining}
    hass.config_entries.async_update_entry(entry=config_entry, options=new_options)
def add_device_overide(hass: HomeAssistant, override: DeviceOverride):
    """Add an Insteon device override.

    Raises ValueError when an override for the same address already
    exists. (Function name misspelling is kept for caller compatibility.)
    """
    config_entry = get_insteon_config_entry(hass)
    existing_overrides = config_entry.options.get(CONF_OVERRIDE, [])
    new_address = Address(override[CONF_ADDRESS])
    for existing_override in existing_overrides:
        if Address(existing_override[CONF_ADDRESS]) == new_address:
            raise ValueError("Duplicate override")
    hass.config_entries.async_update_entry(
        entry=config_entry,
        options=config_entry.options
        | {CONF_OVERRIDE: [*existing_overrides, override]},
    )
    async_dispatcher_send(hass, SIGNAL_ADD_DEVICE_OVERRIDE, override)
def remove_device_override(hass: HomeAssistant, address: Address):
    """Remove a device override from config."""
    config_entry = get_insteon_config_entry(hass)
    remaining = []
    for existing_override in config_entry.options.get(CONF_OVERRIDE, []):
        # Keep every override except the one for the given address.
        if Address(existing_override[CONF_ADDRESS]) != address:
            remaining.append(existing_override)
    new_options = {**config_entry.options, CONF_OVERRIDE: remaining}
    hass.config_entries.async_update_entry(entry=config_entry, options=new_options)
def compute_device_name(ha_device):
    """Return the HA device name.

    A user-assigned name takes precedence over the default name.
    """
    return ha_device.name_by_user or ha_device.name
def get_insteon_device_from_ha_device(ha_device):
    """Return the Insteon device from an HA device, or None."""
    for identifier in ha_device.identifiers:
        # Skip identifiers that are not ours or are malformed.
        if len(identifier) < 2 or identifier[0] != DOMAIN:
            continue
        if devices[identifier[1]]:
            return devices[identifier[1]]
    return None
def notify_device_not_found(connection, msg, text):
    """Notify the caller that the device was not found."""
    error = websocket_api.error_message(
        msg[ID], websocket_api.const.ERR_NOT_FOUND, text
    )
    connection.send_message(error)
def _read_only_schema(name, value):
    """Return a constant value schema."""
    schema = vol.Schema({vol.Required(name): value})
    return voluptuous_serialize.convert(schema)[0]
def get_schema(prop, name, groups):
    """Return the correct schema type, or None when unsupported."""
    # Read-only properties are rendered as constants regardless of type.
    if prop.is_read_only:
        return _read_only_schema(name, prop.value)
    # A few property names get special-cased schemas.
    if name == RAMP_RATE_IN_SEC:
        return _list_schema(name, RAMP_RATE_LIST)
    if name == RADIO_BUTTON_GROUPS:
        button_list = {str(group): groups[group].name for group in groups}
        return _multi_select_schema(name, button_list)
    if name == LOAD_BUTTON:
        button_list = {group: groups[group].name for group in groups}
        return _list_schema(name, button_list)
    # Everything else dispatches on the property's value type.
    builders = {
        bool: lambda n: _bool_schema(n),
        int: lambda n: _byte_schema(n),
        float: lambda n: _float_schema(n),
        ToggleMode: lambda n: _list_schema(n, TOGGLE_MODES),
        RelayMode: lambda n: _list_schema(n, RELAY_MODES),
    }
    builder = builders.get(prop.value_type)
    if builder is not None:
        return builder(name)
    return None
def get_properties(device: Device, show_advanced=False):
    """Get the properties of an Insteon device and return the records and schema.

    Returns a (properties, schema) tuple: a list of property rows and a
    name->schema mapping. Advanced operating flags and properties are
    only included when show_advanced is True.
    """
    properties = []
    schema = {}

    for name, prop in device.configuration.items():
        # Read-only configuration entries are hidden unless advanced
        # mode is on.
        if prop.is_read_only and not show_advanced:
            continue
        prop_schema = get_schema(prop, name, device.groups)
        if prop_schema is None:
            continue
        schema[name] = prop_schema
        properties.append(property_to_dict(prop))

    if show_advanced:
        # The two advanced collections are handled identically; the
        # original code duplicated this loop verbatim for each.
        for prop_collection in (device.operating_flags, device.properties):
            for name, prop in prop_collection.items():
                if prop.property_type != PropertyType.ADVANCED:
                    continue
                prop_schema = get_schema(prop, name, device.groups)
                if prop_schema is not None:
                    schema[name] = prop_schema
                    properties.append(property_to_dict(prop))

    return properties, schema
def property_to_dict(prop):
    """Return a property data row."""
    value = get_usable_value(prop)
    # A pending new_value equal to the current value marks it modified.
    modified = value == prop.new_value
    if prop.name == RAMP_RATE_IN_SEC or prop.value_type in (ToggleMode, RelayMode):
        # Enum and ramp-rate values are shown as lowercase strings.
        value = str(value).lower()
    return {"name": prop.name, "value": value, "modified": modified}
def update_property(device, prop_name, value):
    """Update the value of a device property."""
    prop = device.configuration[prop_name]
    # Enum-typed properties arrive as strings and must be mapped back to
    # the corresponding enum member.
    if prop.value_type in (ToggleMode, RelayMode):
        prop.new_value = getattr(prop.value_type, value.upper())
    else:
        prop.new_value = value
Return a dictionary mapping of a scene. | def _scene_to_dict(scene):
"""Return a dictionary mapping of a scene."""
device_dict = {}
for addr, links in scene["devices"].items():
str_addr = str(addr)
device_dict[str_addr] = []
for data in links:
device_dict[str_addr].append(
{
"data1": data.data1,
"data2": data.data2,
"data3": data.data3,
"has_controller": data.has_controller,
"has_responder": data.has_responder,
}
)
return {"name": scene["name"], "group": scene["group"], "devices": device_dict} |
def async_load_api(hass):
    """Set up the web socket API."""
    # All handlers are registered identically; keep them in one tuple.
    handlers = (
        websocket_get_device,
        websocket_add_device,
        websocket_cancel_add_device,
        websocket_get_scenes,
        websocket_get_scene,
        websocket_save_scene,
        websocket_delete_scene,
        websocket_get_aldb,
        websocket_change_aldb_record,
        websocket_create_aldb_record,
        websocket_write_aldb,
        websocket_load_aldb,
        websocket_reset_aldb,
        websocket_add_default_links,
        websocket_notify_on_aldb_status,
        websocket_add_x10_device,
        websocket_remove_device,
        websocket_get_properties,
        websocket_change_properties_record,
        websocket_write_properties,
        websocket_load_properties,
        websocket_reset_properties,
        websocket_get_config,
        websocket_get_modem_schema,
        websocket_update_modem_config,
        websocket_add_device_override,
        websocket_remove_device_override,
    )
    for handler in handlers:
        websocket_api.async_register_command(hass, handler)
def _time_remaining_to_timestamp(data: IntellifirePollData) -> datetime | None:
    """Return the absolute time the countdown ends, or None when inactive."""
    seconds_remaining = data.timeremaining_s
    # 0 or missing means no countdown is active.
    if not seconds_remaining:
        return None
    return utcnow() + timedelta(seconds=seconds_remaining)
def _downtime_to_timestamp(data: IntellifirePollData) -> datetime | None:
    """Return the absolute time the downtime started, or None when zero."""
    seconds_down = data.downtime
    if not seconds_down:
        return None
    # Downtime is reported as seconds elapsed, counting back from now.
    return utcnow() - timedelta(seconds=seconds_down)
def async_load_intents(hass: HomeAssistant, intents: dict[str, ConfigType]) -> None:
    """Load YAML intents into the intent system."""
    template.attach(hass, intents)
    hass.data[DOMAIN] = intents
    for intent_type, conf in intents.items():
        if CONF_ACTION in conf:
            # Compile the action into a Script object so it can be run
            # when the intent fires.
            conf[CONF_ACTION] = script.Script(
                hass,
                conf[CONF_ACTION],
                f"Intent Script {intent_type}",
                DOMAIN,
                script_mode=conf.get(CONF_MODE, script.DEFAULT_SCRIPT_MODE),
            )
        intent.async_register(hass, ScriptIntentHandler(intent_type, conf))
Output rate limit log line at given level. | def log_rate_limits(
hass: HomeAssistant, target: str, resp: dict[str, Any], level: int = 20
) -> None:
"""Output rate limit log line at given level."""
rate_limits = resp["rateLimits"]
resetsAt = dt_util.parse_datetime(rate_limits["resetsAt"])
resetsAtTime = resetsAt - dt_util.utcnow() if resetsAt is not None else "---"
rate_limit_msg = (
"iOS push notification rate limits for %s: "
"%d sent, %d allowed, %d errors, "
"resets in %s"
)
_LOGGER.log(
level,
rate_limit_msg,
ios.device_name_for_push_id(hass, target),
rate_limits["successful"],
rate_limits["maximum"],
rate_limits["errors"],
str(resetsAtTime).split(".", maxsplit=1)[0],
) |
Get the iOS notification service. | def get_service(
hass: HomeAssistant,
config: ConfigType,
discovery_info: DiscoveryInfoType | None = None,
) -> iOSNotificationService | None:
"""Get the iOS notification service."""
if "ios.notify" not in hass.config.components:
# Need this to enable requirements checking in the app.
hass.config.components.add("ios.notify")
if not ios.devices_with_push(hass):
return None
return iOSNotificationService() |
Set up the iOS sensor. | def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the iOS sensor.

    NOTE(review): no entity-creation logic is visible in this body —
    presumably the platform is populated elsewhere (or this excerpt is
    truncated); confirm against the full module before relying on it.
    """
Return a dictionary of push enabled targets. | def devices_with_push(hass: HomeAssistant) -> dict[str, str]:
"""Return a dictionary of push enabled targets."""
return {
device_name: device.get(ATTR_PUSH_ID)
for device_name, device in hass.data[DOMAIN][ATTR_DEVICES].items()
if device.get(ATTR_PUSH_ID) is not None
} |
Return a list of push enabled target push IDs. | def enabled_push_ids(hass: HomeAssistant) -> list[str]:
"""Return a list of push enabled target push IDs."""
return [
device.get(ATTR_PUSH_ID)
for device in hass.data[DOMAIN][ATTR_DEVICES].values()
if device.get(ATTR_PUSH_ID) is not None
] |
Return a dictionary of all identified devices. | def devices(hass: HomeAssistant) -> dict[str, dict[str, Any]]:
"""Return a dictionary of all identified devices."""
return hass.data[DOMAIN][ATTR_DEVICES] |
Return the device name for the push ID. | def device_name_for_push_id(hass: HomeAssistant, push_id: str) -> str | None:
"""Return the device name for the push ID."""
for device_name, device in hass.data[DOMAIN][ATTR_DEVICES].items():
if device.get(ATTR_PUSH_ID) is push_id:
return device_name # type: ignore[no-any-return]
return None |
Register system health callbacks. | def async_register(
hass: HomeAssistant, register: system_health.SystemHealthRegistration
) -> None:
"""Register system health callbacks."""
register.async_register_info(system_health_info) |
Calculate the "moving average" of a set of indices. | def calculate_trend(indices: list[float]) -> str:
"""Calculate the "moving average" of a set of indices."""
index_range = np.arange(0, len(indices))
index_array = np.array(indices)
linear_fit = np.polyfit(index_range, index_array, 1)
slope = round(linear_fit[0], 2)
if slope > 0:
return TREND_INCREASING
if slope < 0:
return TREND_SUBSIDING
return TREND_FLAT |
Set up the Irish Rail transport sensor. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Irish Rail transport sensor."""
station = config.get(CONF_STATION)
direction = config.get(CONF_DIRECTION)
destination = config.get(CONF_DESTINATION)
stops_at = config.get(CONF_STOPS_AT)
name = config.get(CONF_NAME)
irish_rail = IrishRailRTPI()
data = IrishRailTransportData(irish_rail, station, direction, destination, stops_at)
add_entities(
[
IrishRailTransportSensor(
data, station, direction, destination, stops_at, name
)
],
True,
) |
Retrieve data from the pyiss API. | def update(iss: pyiss.ISS) -> IssData:
"""Retrieve data from the pyiss API."""
return IssData(
number_of_people_in_space=iss.number_of_people_in_space(),
current_location=iss.current_location(),
) |
Generate schema with defaults. | def _data_schema(schema_input: dict[str, str]) -> vol.Schema:
"""Generate schema with defaults."""
return vol.Schema(
{
vol.Required(CONF_HOST, default=schema_input.get(CONF_HOST, "")): str,
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
vol.Optional(CONF_TLS_VER, default=DEFAULT_TLS_VERSION): vol.In([1.1, 1.2]),
},
extra=vol.ALLOW_EXTRA,
) |
Check if the node matches the node_def_id for any platforms.
This is only present on the 5.0 ISY firmware, and is the most reliable
way to determine a device's type. | def _check_for_node_def(
isy_data: IsyData, node: Group | Node, single_platform: Platform | None = None
) -> bool:
"""Check if the node matches the node_def_id for any platforms.
This is only present on the 5.0 ISY firmware, and is the most reliable
way to determine a device's type.
"""
if not hasattr(node, "node_def_id") or node.node_def_id is None:
# Node doesn't have a node_def (pre 5.0 firmware most likely)
return False
node_def_id = node.node_def_id
platforms = NODE_PLATFORMS if not single_platform else [single_platform]
for platform in platforms:
if node_def_id in NODE_FILTERS[platform][FILTER_NODE_DEF_ID]:
isy_data.nodes[platform].append(node)
return True
return False |
Check if the node matches the Insteon type for any platforms.
This is for (presumably) every version of the ISY firmware, but only
works for Insteon device. "Node Server" (v5+) and Z-Wave and others will
not have a type. | def _check_for_insteon_type(
    isy_data: IsyData, node: Group | Node, single_platform: Platform | None = None
) -> bool:
    """Check if the node matches the Insteon type for any platforms.

    This is for (presumably) every version of the ISY firmware, but only
    works for Insteon device. "Node Server" (v5+) and Z-Wave and others will
    not have a type.

    Appends the node to the matching platform bucket in isy_data.nodes and
    returns True; returns False when no platform filter matches.
    """
    if node.protocol != PROTO_INSTEON:
        return False
    if not hasattr(node, "type") or node.type is None:
        # Node doesn't have a type (non-Insteon device most likely)
        return False
    device_type = node.type
    platforms = NODE_PLATFORMS if not single_platform else [single_platform]
    for platform in platforms:
        if any(
            device_type.startswith(t)
            for t in set(NODE_FILTERS[platform][FILTER_INSTEON_TYPE])
        ):
            # Hacky special-cases for certain devices with different platforms
            # included as subnodes. Note that special-cases are not necessary
            # on ISY 5.x firmware as it uses the superior NodeDefs method
            # The subnode id is the last space-separated token of the Insteon
            # address, parsed as hex.
            subnode_id = int(node.address.split(" ")[-1], 16)
            # FanLinc, which has a light module as one of its nodes.
            if platform == Platform.FAN and subnode_id == SUBNODE_FANLINC_LIGHT:
                isy_data.nodes[Platform.LIGHT].append(node)
                return True
            # Thermostats, which has a "Heat" and "Cool" sub-node on address 2 and 3
            if platform == Platform.CLIMATE and subnode_id in (
                SUBNODE_CLIMATE_COOL,
                SUBNODE_CLIMATE_HEAT,
            ):
                isy_data.nodes[Platform.BINARY_SENSOR].append(node)
                return True
            # IOLincs which have a sensor and relay on 2 different nodes
            if (
                platform == Platform.BINARY_SENSOR
                and device_type.startswith(TYPE_CATEGORY_SENSOR_ACTUATORS)
                and subnode_id == SUBNODE_IOLINC_RELAY
            ):
                isy_data.nodes[Platform.SWITCH].append(node)
                return True
            # Smartenit EZIO2X4
            if (
                platform == Platform.SWITCH
                and device_type.startswith(TYPE_EZIO2X4)
                and subnode_id in SUBNODE_EZIO2X4_SENSORS
            ):
                isy_data.nodes[Platform.BINARY_SENSOR].append(node)
                return True
            # No special case applied: file under the matched platform itself.
            isy_data.nodes[platform].append(node)
            return True
    return False
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.