response
stringlengths 1
33.1k
| instruction
stringlengths 22
582k
|
---|---|
Return Tado Mode based on Presence attribute. | def get_tado_mode(data: dict[str, str]) -> str | None:
"""Return Tado Mode based on Presence attribute."""
if "presence" in data:
return data["presence"]
return None |
def get_automatic_geofencing(data: dict[str, str]) -> bool:
    """Return whether Automatic Geofencing is enabled based on Presence Locked attribute.

    Geofencing is automatic exactly when presence is reported and NOT locked;
    a missing attribute is treated as "not automatic".
    """
    if "presenceLocked" not in data:
        return False
    return not data["presenceLocked"]
def get_geofencing_mode(data: dict[str, str]) -> str:
    """Return Geofencing Mode based on Presence and Presence Locked attributes.

    Produces a display string such as "Home (Auto)". A missing presence
    reads as "unknown"; a missing or truthy presenceLocked reads as manual.
    """
    # Fix: dropped the dead `tado_mode = ""` / `geofencing_switch_mode = ""`
    # assignments that were unconditionally overwritten.
    tado_mode = data.get("presence", "unknown")
    # Locked presence (or no lock attribute at all) means the user pinned the
    # mode manually; only an explicit unlocked state counts as automatic.
    if "presenceLocked" in data and not data["presenceLocked"]:
        geofencing_switch_mode = "auto"
    else:
        geofencing_switch_mode = "manual"
    return f"{tado_mode.capitalize()} ({geofencing_switch_mode.capitalize()})"
def setup_services(hass: HomeAssistant) -> None:
    """Set up the services for the Tado integration.

    Registers the add_meter_reading service, which forwards a meter reading
    to the Tado connector of the selected config entry.
    """

    async def add_meter_reading(call: ServiceCall) -> None:
        """Send meter reading to Tado."""
        entry_id: str = call.data[CONF_CONFIG_ENTRY]
        reading: int = call.data[CONF_READING]
        _LOGGER.debug("Add meter reading %s", reading)

        tadoconnector = hass.data[DOMAIN][entry_id][DATA]
        # The Tado client is synchronous; run the call in the executor pool
        # so the event loop is not blocked.
        response: dict = await hass.async_add_executor_job(
            tadoconnector.set_meter_reading, call.data[CONF_READING]
        )
        # A "message" key in the response indicates the API rejected the
        # reading; surface it to the user as a service error.
        if ATTR_MESSAGE in response:
            raise HomeAssistantError(response[ATTR_MESSAGE])

    hass.services.async_register(
        DOMAIN, SERVICE_ADD_METER_READING, add_meter_reading, SCHEMA_ADD_METER_READING
    )
def _generate_entities(tado: TadoConnector) -> list[WaterHeaterEntity]:
    """Create all water heater entities.

    One entity is created per hot-water zone known to the connector.
    """
    return [
        create_water_heater_entity(tado, zone["name"], zone["id"], zone)
        for zone in tado.zones
        if zone["type"] == TYPE_HOT_WATER
    ]
def create_water_heater_entity(tado: TadoConnector, name: str, zone_id: int, zone: str):
    """Create a Tado water heater device.

    NOTE(review): `zone` is annotated as str but callers pass the whole zone
    dict, and the parameter is unused in this body — confirm the annotation.
    """
    capabilities = tado.get_capabilities(zone_id)
    supports_temperature_control = capabilities["canSetTemperature"]

    # Only expose a temperature range when the zone both supports setting a
    # temperature and actually reports one.
    if supports_temperature_control and "temperatures" in capabilities:
        temperatures = capabilities["temperatures"]
        min_temp = float(temperatures["celsius"]["min"])
        max_temp = float(temperatures["celsius"]["max"])
    else:
        min_temp = None
        max_temp = None

    return TadoWaterHeater(
        tado,
        name,
        zone_id,
        supports_temperature_control,
        min_temp,
        max_temp,
    )
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Tank Utility sensor.

    Authenticates against the Tank Utility API and creates one sensor per
    configured device.
    """
    email = config[CONF_EMAIL]
    password = config[CONF_PASSWORD]
    devices = config[CONF_DEVICES]

    try:
        token = auth.get_token(email, password)
    except requests.exceptions.HTTPError as http_error:
        if http_error.response.status_code == requests.codes.unauthorized:
            _LOGGER.error("Invalid credentials")
            return
        # Bug fix: previously any other HTTP error was swallowed and the
        # undefined `token` was referenced below (NameError). Re-raise so
        # the real failure is reported instead.
        raise

    all_sensors = [
        TankUtilitySensor(email, password, token, device) for device in devices
    ]
    add_entities(all_sensors, True)
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Taps Aff binary sensor."""
    sensor_name = config.get(CONF_NAME)
    data = TapsAffData(config.get(CONF_LOCATION))
    add_entities([TapsAffSensor(data, sensor_name)], True)
def clear_discovery_hash(
    hass: HomeAssistant, discovery_hash: DiscoveryHashType
) -> None:
    """Clear entry in ALREADY_DISCOVERED list."""
    if ALREADY_DISCOVERED in hass.data:
        del hass.data[ALREADY_DISCOVERED][discovery_hash]
    # else: discovery is shutting down, nothing to clear
def set_discovery_hash(hass: HomeAssistant, discovery_hash: DiscoveryHashType) -> None:
    """Set entry in ALREADY_DISCOVERED list."""
    discovered = hass.data[ALREADY_DISCOVERED]
    discovered[discovery_hash] = {}
def warn_if_topic_duplicated(
    hass: HomeAssistant,
    command_topic: str,
    own_mac: str | None,
    own_device_config: TasmotaDeviceConfig,
) -> bool:
    """Log and create repairs issue if several devices share the same topic.

    Returns True when at least one OTHER discovered device uses the same
    command topic as this one.
    """
    duplicated = False
    # Collect every other discovered device that shares our command topic.
    offenders = []
    for other_mac, other_config in hass.data[DISCOVERY_DATA].items():
        if own_mac and other_mac == own_mac:
            # Skip our own entry in the discovery data.
            continue
        if command_topic == get_topic_command(other_config):
            offenders.append((other_mac, tasmota_get_device_config(other_config)))
    # One repairs issue per topic, so re-detection updates the same issue.
    issue_id = f"topic_duplicated_{command_topic}"
    if offenders:
        if own_mac:
            # Include this device itself in the report alongside the others.
            offenders.append((own_mac, own_device_config))
        offender_strings = [
            f"'{cfg[tasmota_const.CONF_NAME]}' ({cfg[tasmota_const.CONF_IP]})"
            for _, cfg in offenders
        ]
        _LOGGER.warning(
            (
                "Multiple Tasmota devices are sharing the same topic '%s'. Offending"
                " devices: %s"
            ),
            command_topic,
            ", ".join(offender_strings),
        )
        ir.async_create_issue(
            hass,
            DOMAIN,
            issue_id,
            data={
                "key": "topic_duplicated",
                "mac": " ".join([mac for mac, _ in offenders]),
                "topic": command_topic,
            },
            is_fixable=False,
            learn_more_url=MQTT_TOPIC_URL,
            severity=ir.IssueSeverity.ERROR,
            translation_key="topic_duplicated",
            translation_placeholders={
                "topic": command_topic,
                "offenders": "\n\n* " + "\n\n* ".join(offender_strings),
            },
        )
        duplicated = True
    return duplicated
def clamp(value: float) -> float:
    """Clamp value to the range 0..255."""
    if value < 0:
        return 0
    if value > 255:
        return 255
    return value
def scale_brightness(brightness: float) -> float:
    """Scale brightness from 0..255 to 1..100."""
    normalized = brightness / DEFAULT_BRIGHTNESS_MAX
    scaled = round(normalized * TASMOTA_BRIGHTNESS_MAX)
    if scaled > TASMOTA_BRIGHTNESS_MAX:
        scaled = TASMOTA_BRIGHTNESS_MAX
    # Make sure the brightness is not rounded down to 0
    if scaled < 1:
        return 1
    return scaled
def _update_device(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    config: TasmotaDeviceConfig,
    device_registry: DeviceRegistry,
) -> None:
    """Add or update device registry.

    Registers the Tasmota device (keyed by its MAC address) under the given
    config entry, updating the existing entry if one already exists.
    """
    _LOGGER.debug("Adding or updating tasmota device %s", config[CONF_MAC])
    device_registry.async_get_or_create(
        config_entry_id=config_entry.entry_id,
        # The device's web UI is reachable directly by IP.
        configuration_url=f"http://{config[CONF_IP]}/",
        connections={(CONNECTION_NETWORK_MAC, config[CONF_MAC])},
        manufacturer=config[CONF_MANUFACTURER],
        model=config[CONF_MODEL],
        name=config[CONF_NAME],
        sw_version=config[CONF_SW_VERSION],
    )
def get_top_stats(
    home_stats: PyTautulliApiHomeStats, activity: PyTautulliApiActivity, key: str
) -> str | None:
    """Get top statistics.

    Scans the home stats for an entry matching ``key`` (or the special
    top-users entry) and returns its first row's value; None if no match.
    """
    value = None
    for stat in home_stats:
        if not stat.rows:
            continue
        if stat.stat_id == key:
            value = stat.rows[0].title
        elif stat.stat_id == "top_users" and key == ATTR_TOP_USER:
            value = stat.rows[0].user
    return value
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the TCP binary sensor."""
    sensor = TcpBinarySensor(hass, config)
    add_entities([sensor])
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the TCP Sensor."""
    sensor = TcpSensor(hass, config)
    add_entities([sensor])
def technove_exception_handler(
    func: Callable[Concatenate[_TechnoVEEntityT, _P], Coroutine[Any, Any, Any]],
) -> Callable[Concatenate[_TechnoVEEntityT, _P], Coroutine[Any, Any, None]]:
    """Decorate TechnoVE calls to handle TechnoVE exceptions.

    A decorator that wraps the passed in function, catches TechnoVE errors,
    and handles the availability of the device in the data coordinator.
    """

    async def handler(
        self: _TechnoVEEntityT, *args: _P.args, **kwargs: _P.kwargs
    ) -> None:
        try:
            await func(self, *args, **kwargs)
        except TechnoVEConnectionError as error:
            # A connection failure marks the coordinator update as failed so
            # dependent entities are shown as unavailable.
            self.coordinator.last_update_success = False
            self.coordinator.async_update_listeners()
            raise HomeAssistantError("Error communicating with TechnoVE API") from error
        except TechnoVEError as error:
            # Other API errors are surfaced without changing availability.
            raise HomeAssistantError("Invalid response from TechnoVE API") from error

    return handler
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Ted5000 sensor."""
    host: str = config[CONF_HOST]
    port: int = config[CONF_PORT]
    name: str = config[CONF_NAME]

    gateway = Ted5000Gateway(f"http://{host}:{port}/api/LiveData.xml")
    # Fetch MTU information up front so we know which sensors to create.
    gateway.update()

    entities = [
        Ted5000Sensor(gateway, name, mtu, description)
        for mtu in gateway.data
        for description in SENSORS
    ]
    add_entities(entities)
def get_service(
    hass: HomeAssistant,
    config: ConfigType,
    discovery_info: DiscoveryInfoType | None = None,
) -> TelegramNotificationService:
    """Get the Telegram notification service."""
    setup_reload_service(hass, TELEGRAM_DOMAIN, PLATFORMS)
    return TelegramNotificationService(hass, config.get(CONF_CHAT_ID))
def initialize_bot(hass: HomeAssistant, p_config: dict) -> Bot:
    """Initialize telegram bot with proxy support.

    Builds an HTTPX-backed Bot; when a proxy URL is configured, the legacy
    proxy_params option is honored but a deprecation repairs issue is raised.
    """
    api_key: str = p_config[CONF_API_KEY]
    proxy_url: str | None = p_config.get(CONF_PROXY_URL)
    proxy_params: dict | None = p_config.get(CONF_PROXY_PARAMS)

    if proxy_url is not None:
        auth = None
        if proxy_params is None:
            # CONF_PROXY_PARAMS has been kept for backwards compatibility.
            proxy_params = {}
        elif "username" in proxy_params and "password" in proxy_params:
            # Auth can actually be stuffed into the URL, but the docs have previously
            # indicated to put them here.
            auth = proxy_params.pop("username"), proxy_params.pop("password")
            # Raise a deprecation issue specific to credentials in proxy_params.
            ir.async_create_issue(
                hass,
                DOMAIN,
                "proxy_params_auth_deprecation",
                breaks_in_ha_version="2024.10.0",
                is_persistent=False,
                is_fixable=False,
                severity=ir.IssueSeverity.WARNING,
                translation_placeholders={
                    "proxy_params": CONF_PROXY_PARAMS,
                    "proxy_url": CONF_PROXY_URL,
                    "telegram_bot": "Telegram bot",
                },
                translation_key="proxy_params_auth_deprecation",
                learn_more_url="https://github.com/home-assistant/core/pull/112778",
            )
        else:
            # proxy_params without credentials: still deprecated, different issue.
            ir.async_create_issue(
                hass,
                DOMAIN,
                "proxy_params_deprecation",
                breaks_in_ha_version="2024.10.0",
                is_persistent=False,
                is_fixable=False,
                severity=ir.IssueSeverity.WARNING,
                translation_placeholders={
                    "proxy_params": CONF_PROXY_PARAMS,
                    "proxy_url": CONF_PROXY_URL,
                    "httpx": "httpx",
                    "telegram_bot": "Telegram bot",
                },
                translation_key="proxy_params_deprecation",
                learn_more_url="https://github.com/home-assistant/core/pull/112778",
            )
        proxy = httpx.Proxy(proxy_url, auth=auth, **proxy_params)
        request = HTTPXRequest(connection_pool_size=8, proxy=proxy)
    else:
        request = HTTPXRequest(connection_pool_size=8)
    return Bot(token=api_key, request=request)
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Tellstick covers."""
    if discovery_info is None or discovery_info[ATTR_DISCOVER_DEVICES] is None:
        return
    repetitions = discovery_info.get(ATTR_DISCOVER_CONFIG, DEFAULT_SIGNAL_REPETITIONS)
    covers = [
        TellstickCover(hass.data[DATA_TELLSTICK][device_id], repetitions)
        for device_id in discovery_info[ATTR_DISCOVER_DEVICES]
    ]
    add_entities(covers, True)
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Tellstick lights."""
    if discovery_info is None or discovery_info[ATTR_DISCOVER_DEVICES] is None:
        return
    repetitions = discovery_info.get(ATTR_DISCOVER_CONFIG, DEFAULT_SIGNAL_REPETITIONS)
    lights = [
        TellstickLight(hass.data[DATA_TELLSTICK][device_id], repetitions)
        for device_id in discovery_info[ATTR_DISCOVER_DEVICES]
    ]
    add_entities(lights, True)
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Tellstick sensors."""
    # Map tellcore datatype constants to the entity description (name, unit,
    # device class) that should be created for each reported value.
    sensor_value_descriptions = {
        tellcore_constants.TELLSTICK_TEMPERATURE: DatatypeDescription(
            "temperature",
            config.get(CONF_TEMPERATURE_SCALE),
            SensorDeviceClass.TEMPERATURE,
        ),
        tellcore_constants.TELLSTICK_HUMIDITY: DatatypeDescription(
            "humidity",
            PERCENTAGE,
            SensorDeviceClass.HUMIDITY,
        ),
        tellcore_constants.TELLSTICK_RAINRATE: DatatypeDescription(
            "rain rate", "", None
        ),
        tellcore_constants.TELLSTICK_RAINTOTAL: DatatypeDescription(
            "rain total", "", None
        ),
        tellcore_constants.TELLSTICK_WINDDIRECTION: DatatypeDescription(
            "wind direction", "", None
        ),
        tellcore_constants.TELLSTICK_WINDAVERAGE: DatatypeDescription(
            "wind average", "", None
        ),
        tellcore_constants.TELLSTICK_WINDGUST: DatatypeDescription(
            "wind gust", "", None
        ),
    }
    try:
        tellcore_lib = telldus.TelldusCore()
    except OSError:
        _LOGGER.exception("Could not initialize Tellstick")
        return
    sensors = []
    datatype_mask = config.get(CONF_DATATYPE_MASK)
    if config[CONF_ONLY_NAMED]:
        # Build an allow-list keyed by the most specific identifier given:
        # protocol+model+id, protocol+id, or the bare sensor id.
        named_sensors = {}
        for named_sensor in config[CONF_ONLY_NAMED]:
            name = named_sensor[CONF_NAME]
            proto = named_sensor.get(CONF_PROTOCOL)
            model = named_sensor.get(CONF_MODEL)
            id_ = named_sensor[CONF_ID]
            if proto is not None:
                if model is not None:
                    named_sensors[f"{proto}{model}{id_}"] = name
                else:
                    named_sensors[f"{proto}{id_}"] = name
            else:
                named_sensors[id_] = name
    for tellcore_sensor in tellcore_lib.sensors():
        if not config[CONF_ONLY_NAMED]:
            # No allow-list configured: use the numeric id as the name.
            sensor_name = str(tellcore_sensor.id)
        else:
            proto_id = f"{tellcore_sensor.protocol}{tellcore_sensor.id}"
            proto_model_id = (
                f"{tellcore_sensor.protocol}{tellcore_sensor.model}{tellcore_sensor.id}"
            )
            # Match from least to most specific key; skip unlisted sensors.
            if tellcore_sensor.id in named_sensors:
                sensor_name = named_sensors[tellcore_sensor.id]
            elif proto_id in named_sensors:
                sensor_name = named_sensors[proto_id]
            elif proto_model_id in named_sensors:
                sensor_name = named_sensors[proto_model_id]
            else:
                continue
        # Create one entity per datatype the sensor supports that is also
        # enabled in the configured datatype mask.
        for datatype, sensor_info in sensor_value_descriptions.items():
            if datatype & datatype_mask and tellcore_sensor.has_value(datatype):
                sensors.append(
                    TellstickSensor(sensor_name, tellcore_sensor, datatype, sensor_info)
                )
    add_entities(sensors)
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up Tellstick switches."""
    if discovery_info is None or discovery_info[ATTR_DISCOVER_DEVICES] is None:
        return
    # Allow platform level override, fallback to module config
    repetitions = discovery_info.get(ATTR_DISCOVER_CONFIG, DEFAULT_SIGNAL_REPETITIONS)
    switches = [
        TellstickSwitch(hass.data[DATA_TELLSTICK][device_id], repetitions)
        for device_id in discovery_info[ATTR_DISCOVER_DEVICES]
    ]
    add_entities(switches, True)
def _discover(hass, config, component_name, found_tellcore_devices):
    """Set up and send the discovery event.

    Does nothing when no devices were found.
    """
    if not found_tellcore_devices:
        return
    _LOGGER.info(
        "Discovered %d new %s devices", len(found_tellcore_devices), component_name
    )
    discovery.load_platform(
        hass,
        component_name,
        DOMAIN,
        {
            ATTR_DISCOVER_DEVICES: found_tellcore_devices,
            ATTR_DISCOVER_CONFIG: config[DOMAIN].get(CONF_SIGNAL_REPETITIONS),
        },
        config,
    )
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the Tellstick component.

    Connects to a local (or optional networked) Tellstick, registers all
    known devices, dispatches discovery per platform and wires up the
    tellcore device-event callback.
    """
    conf = config.get(DOMAIN, {})
    net_host = conf.get(CONF_HOST)
    net_ports = conf.get(CONF_PORT)
    # Initialize remote tellcore client
    if net_host:
        net_client = TellCoreClient(
            host=net_host, port_client=net_ports[0], port_events=net_ports[1]
        )
        net_client.start()

        def stop_tellcore_net(event):
            """Event handler to stop the client."""
            net_client.stop()

        # Ensure the network client is shut down with Home Assistant.
        hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_tellcore_net)
    try:
        # Dispatch tellcore callbacks onto the Home Assistant event loop.
        tellcore_lib = TelldusCore(
            callback_dispatcher=AsyncioCallbackDispatcher(hass.loop)
        )
    except OSError:
        _LOGGER.exception("Could not initialize Tellstick")
        return False
    # Get all devices, switches and lights alike
    tellcore_devices = tellcore_lib.devices()
    # Register devices
    hass.data[DATA_TELLSTICK] = {device.id: device for device in tellcore_devices}
    # Discover the lights (anything that supports dimming)
    _discover(
        hass,
        config,
        "light",
        [device.id for device in tellcore_devices if device.methods(TELLSTICK_DIM)],
    )
    # Discover the cover (anything that supports "up")
    _discover(
        hass,
        config,
        "cover",
        [device.id for device in tellcore_devices if device.methods(TELLSTICK_UP)],
    )
    # Discover the switches (everything that is neither dimmable nor a cover)
    _discover(
        hass,
        config,
        "switch",
        [
            device.id
            for device in tellcore_devices
            if (not device.methods(TELLSTICK_UP) and not device.methods(TELLSTICK_DIM))
        ],
    )

    @callback
    def async_handle_callback(tellcore_id, tellcore_command, tellcore_data, cid):
        """Handle the actual callback from Tellcore."""
        async_dispatcher_send(
            hass, SIGNAL_TELLCORE_CALLBACK, tellcore_id, tellcore_command, tellcore_data
        )

    # Register callback
    callback_id = tellcore_lib.register_device_event(async_handle_callback)

    def clean_up_callback(event):
        """Unregister the callback bindings."""
        if callback_id is not None:
            tellcore_lib.unregister_callback(callback_id)

    hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, clean_up_callback)
    return True
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Find and return switches controlled by telnet commands."""
    switch_configs: dict[str, Any] = config[CONF_SWITCHES]
    entities = []
    for object_id, device_config in switch_configs.items():
        state_template: Template | None = device_config.get(CONF_VALUE_TEMPLATE)
        if state_template is not None:
            state_template.hass = hass
        entities.append(
            TelnetSwitch(
                object_id,
                device_config[CONF_RESOURCE],
                device_config[CONF_PORT],
                device_config.get(CONF_NAME, object_id),
                device_config[CONF_COMMAND_ON],
                device_config[CONF_COMMAND_OFF],
                device_config.get(CONF_COMMAND_STATE),
                state_template,
                device_config[CONF_TIMEOUT],
            )
        )
    if not entities:
        _LOGGER.error("No switches added")
        return
    add_entities(entities)
def get_temper_devices():
    """Scan the Temper devices from temperusb."""
    handler = TemperHandler()
    return handler.get_devices()
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Temper sensors.

    The first device keeps the configured name; later ones get an
    index suffix.
    """
    prefix = config[CONF_NAME]
    scaling = {"scale": config.get(CONF_SCALE), "offset": config.get(CONF_OFFSET)}
    for idx, dev in enumerate(get_temper_devices()):
        sensor_name = prefix if idx == 0 else f"{prefix}_{idx!s}"
        TEMPER_SENSORS.append(TemperSensor(dev, sensor_name, scaling))
    add_entities(TEMPER_SENSORS)
def reset_devices():
    """Re-scan for underlying Temper sensors and assign them to our devices.

    This assumes the same sensor devices are present in the same order.
    """
    for sensor, device in zip(TEMPER_SENSORS, get_temper_devices(), strict=False):
        sensor.set_temper_device(device)
def rewrite_legacy_to_modern_conf(cfg: dict[str, dict]) -> list[dict]:
    """Rewrite legacy binary sensor definitions to modern ones."""
    modern = []
    for object_id, legacy_cfg in cfg.items():
        converted = rewrite_common_legacy_to_modern_conf(
            {**legacy_cfg, CONF_OBJECT_ID: object_id}, LEGACY_FIELDS
        )
        # Fall back to the object id as the entity name.
        if CONF_NAME not in converted:
            converted[CONF_NAME] = template.Template(object_id)
        modern.append(converted)
    return modern
def _async_create_template_tracking_entities(
    async_add_entities: AddEntitiesCallback,
    hass: HomeAssistant,
    definitions: list[dict],
    unique_id_prefix: str | None,
) -> None:
    """Create the template binary sensors."""
    entities = []
    for entity_conf in definitions:
        unique_id = entity_conf.get(CONF_UNIQUE_ID)
        # Scope per-entity ids under the config entry's prefix when present.
        if unique_id and unique_id_prefix:
            unique_id = f"{unique_id_prefix}-{unique_id}"
        entities.append(BinarySensorTemplate(hass, entity_conf, unique_id))
    async_add_entities(entities)
def async_create_preview_binary_sensor(
    hass: HomeAssistant, name: str, config: dict[str, Any]
) -> BinarySensorTemplate:
    """Create a preview sensor."""
    merged = config | {CONF_NAME: name}
    return BinarySensorTemplate(hass, BINARY_SENSOR_SCHEMA(merged), None)
def generate_schema(domain: str, flow_type: str) -> dict[vol.Marker, Any]:
    """Generate schema.

    Returns the extra voluptuous schema fields for the given platform; the
    device class selector is only offered for binary sensors in config flows,
    while sensors always get unit / device class / state class selectors.
    """
    schema: dict[vol.Marker, Any] = {}

    if domain == Platform.BINARY_SENSOR and flow_type == "config":
        schema = {
            vol.Optional(CONF_DEVICE_CLASS): selector.SelectSelector(
                selector.SelectSelectorConfig(
                    options=[cls.value for cls in BinarySensorDeviceClass],
                    mode=selector.SelectSelectorMode.DROPDOWN,
                    translation_key="binary_sensor_device_class",
                    sort=True,
                ),
            )
        }

    if domain == Platform.SENSOR:
        schema = {
            vol.Optional(CONF_UNIT_OF_MEASUREMENT): selector.SelectSelector(
                selector.SelectSelectorConfig(
                    # Deduplicate units shared by several device classes.
                    options=list(
                        {
                            str(unit)
                            for units in DEVICE_CLASS_UNITS.values()
                            for unit in units
                            if unit is not None
                        }
                    ),
                    mode=selector.SelectSelectorMode.DROPDOWN,
                    translation_key="sensor_unit_of_measurement",
                    custom_value=True,
                    sort=True,
                ),
            ),
            vol.Optional(CONF_DEVICE_CLASS): selector.SelectSelector(
                selector.SelectSelectorConfig(
                    # ENUM is excluded: it needs options, not a template state.
                    options=[
                        cls.value
                        for cls in SensorDeviceClass
                        if cls != SensorDeviceClass.ENUM
                    ],
                    mode=selector.SelectSelectorMode.DROPDOWN,
                    translation_key="sensor_device_class",
                    sort=True,
                ),
            ),
            vol.Optional(CONF_STATE_CLASS): selector.SelectSelector(
                selector.SelectSelectorConfig(
                    options=[cls.value for cls in SensorStateClass],
                    mode=selector.SelectSelectorMode.DROPDOWN,
                    translation_key="sensor_state_class",
                    sort=True,
                ),
            ),
        }

    return schema
def options_schema(domain: str) -> vol.Schema:
    """Generate options schema."""
    base = {vol.Required(CONF_STATE): selector.TemplateSelector()}
    return vol.Schema(base | generate_schema(domain, "option"))
def config_schema(domain: str) -> vol.Schema:
    """Generate config schema."""
    base = {
        vol.Required(CONF_NAME): selector.TextSelector(),
        vol.Required(CONF_STATE): selector.TemplateSelector(),
    }
    return vol.Schema(base | generate_schema(domain, "config"))
def _validate_unit(options: dict[str, Any]) -> None:
    """Validate unit of measurement.

    Raises vol.Invalid when a device class is set, has a known unit list,
    and the configured unit is not in that list.
    """
    device_class = options.get(CONF_DEVICE_CLASS)
    if not device_class:
        return
    units = DEVICE_CLASS_UNITS.get(device_class)
    if units is None:
        return
    unit = options.get(CONF_UNIT_OF_MEASUREMENT)
    if unit in units:
        return
    sorted_units = sorted(
        (f"'{str(u)}'" if u else "no unit of measurement" for u in units),
        key=str.casefold,
    )
    if len(sorted_units) == 1:
        units_string = sorted_units[0]
    else:
        units_string = f"one of {', '.join(sorted_units)}"
    raise vol.Invalid(
        f"'{unit}' is not a valid unit for device class '{device_class}'; "
        f"expected {units_string}"
    )
def _validate_state_class(options: dict[str, Any]) -> None:
    """Validate state class.

    Raises vol.Invalid when both a state class and a device class are set
    and the state class is not allowed for that device class.
    """
    state_class = options.get(CONF_STATE_CLASS)
    if not state_class:
        return
    device_class = options.get(CONF_DEVICE_CLASS)
    if not device_class:
        return
    state_classes = DEVICE_CLASS_STATE_CLASSES.get(device_class)
    if state_classes is None or state_class in state_classes:
        return
    sorted_state_classes = sorted(
        (f"'{str(sc)}'" for sc in state_classes),
        key=str.casefold,
    )
    if len(sorted_state_classes) == 0:
        state_classes_string = "no state class"
    elif len(sorted_state_classes) == 1:
        state_classes_string = sorted_state_classes[0]
    else:
        state_classes_string = f"one of {', '.join(sorted_state_classes)}"
    raise vol.Invalid(
        f"'{state_class}' is not a valid state class for device class "
        f"'{device_class}'; expected {state_classes_string}"
    )
def validate_user_input(
    template_type: str,
) -> Callable[
    [SchemaCommonFlowHandler, dict[str, Any]],
    Coroutine[Any, Any, dict[str, Any]],
]:
    """Do post validation of user input.

    For sensors: Validate unit of measurement.
    For all domaines: Set template type.
    """

    async def _validate_user_input(
        _: SchemaCommonFlowHandler,
        user_input: dict[str, Any],
    ) -> dict[str, Any]:
        """Add template type to user input."""
        # Sensor-only consistency checks between unit/device/state class.
        if template_type == Platform.SENSOR:
            _validate_unit(user_input)
            _validate_state_class(user_input)
        # Prepend the template type so the flow stores it with the options.
        return {"template_type": template_type} | user_input

    return _validate_user_input
def ws_start_preview(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Generate a preview.

    Validates the user's flow input, then starts a preview entity whose
    state/attribute updates are streamed to the websocket subscription.
    """

    def _validate(schema: vol.Schema, domain: str, user_input: dict[str, Any]) -> Any:
        # Validate each supplied field individually so that every invalid
        # field is reported, not only the first failure.
        errors = {}
        key: vol.Marker
        for key, validator in schema.schema.items():
            if key.schema not in user_input:
                continue
            try:
                validator(user_input[key.schema])
            except vol.Invalid as ex:
                errors[key.schema] = str(ex.msg)
        # Extra cross-field checks for sensors.
        if domain == Platform.SENSOR:
            try:
                _validate_unit(user_input)
            except vol.Invalid as ex:
                errors[CONF_UNIT_OF_MEASUREMENT] = str(ex.msg)
            try:
                _validate_state_class(user_input)
            except vol.Invalid as ex:
                errors[CONF_STATE_CLASS] = str(ex.msg)
        return errors

    entity_registry_entry: er.RegistryEntry | None = None
    if msg["flow_type"] == "config_flow":
        # New entity: template type comes from the current flow step.
        flow_status = hass.config_entries.flow.async_get(msg["flow_id"])
        template_type = flow_status["step_id"]
        form_step = cast(SchemaFlowFormStep, CONFIG_FLOW[template_type])
        schema = cast(vol.Schema, form_step.schema)
        name = msg["user_input"]["name"]
    else:
        # Options flow: template type and name come from the config entry.
        flow_status = hass.config_entries.options.async_get(msg["flow_id"])
        config_entry = hass.config_entries.async_get_entry(flow_status["handler"])
        if not config_entry:
            raise HomeAssistantError
        template_type = config_entry.options["template_type"]
        name = config_entry.options["name"]
        schema = cast(vol.Schema, OPTIONS_FLOW[template_type].schema)
        # Reuse the existing registry entry so the preview reflects
        # registry-level customizations of the entity.
        entity_registry = er.async_get(hass)
        entries = er.async_entries_for_config_entry(
            entity_registry, flow_status["handler"]
        )
        if entries:
            entity_registry_entry = entries[0]

    errors = _validate(schema, template_type, msg["user_input"])

    @callback
    def async_preview_updated(
        state: str | None,
        attributes: Mapping[str, Any] | None,
        listeners: dict[str, bool | set[str]] | None,
        error: str | None,
    ) -> None:
        """Forward config entry state events to websocket."""
        if error is not None:
            connection.send_message(
                websocket_api.event_message(
                    msg["id"],
                    {"error": error},
                )
            )
            return
        connection.send_message(
            websocket_api.event_message(
                msg["id"],
                {"attributes": attributes, "listeners": listeners, "state": state},
            )
        )

    # Abort before creating the preview entity if validation failed.
    if errors:
        connection.send_message(
            {
                "id": msg["id"],
                "type": websocket_api.const.TYPE_RESULT,
                "success": False,
                "error": {"code": "invalid_user_input", "message": errors},
            }
        )
        return

    preview_entity = CREATE_PREVIEW_ENTITY[template_type](hass, name, msg["user_input"])
    preview_entity.hass = hass
    preview_entity.registry_entry = entity_registry_entry

    connection.send_result(msg["id"])
    connection.subscriptions[msg["id"]] = preview_entity.async_start_preview(
        async_preview_updated
    )
def validate_last_reset(val):
    """Run extra validation checks.

    last_reset only makes sense for sensors with state_class 'total'.
    """
    has_last_reset = val.get(ATTR_LAST_RESET) is not None
    if has_last_reset and val.get(CONF_STATE_CLASS) != SensorStateClass.TOTAL:
        raise vol.Invalid(
            "last_reset is only valid for template sensors with state_class 'total'"
        )
    return val
def extra_validation_checks(val):
    """Run extra validation checks.

    Rejects triggers outside the `template:` key and requires at least one
    sensor definition.
    """
    if CONF_TRIGGER in val:
        raise vol.Invalid(
            "You can only add triggers to template entities if they are defined under"
            " `template:`. See the template documentation for more information:"
            " https://www.home-assistant.io/integrations/template/"
        )
    if CONF_SENSORS not in val and SENSOR_DOMAIN not in val:
        raise vol.Invalid(f"Required key {SENSOR_DOMAIN} not defined")
    return val
def rewrite_legacy_to_modern_conf(cfg: dict[str, dict]) -> list[dict]:
    """Rewrite legacy sensor definitions to modern ones."""
    modern = []
    for object_id, legacy_cfg in cfg.items():
        converted = rewrite_common_legacy_to_modern_conf(
            {**legacy_cfg, CONF_OBJECT_ID: object_id}, LEGACY_FIELDS
        )
        # Fall back to the object id as the entity name.
        if CONF_NAME not in converted:
            converted[CONF_NAME] = template.Template(object_id)
        modern.append(converted)
    return modern
def _async_create_template_tracking_entities(
    async_add_entities: AddEntitiesCallback,
    hass: HomeAssistant,
    definitions: list[dict],
    unique_id_prefix: str | None,
) -> None:
    """Create the template sensors."""
    entities = []
    for entity_conf in definitions:
        unique_id = entity_conf.get(CONF_UNIQUE_ID)
        # Scope per-entity ids under the config entry's prefix when present.
        if unique_id and unique_id_prefix:
            unique_id = f"{unique_id_prefix}-{unique_id}"
        entities.append(SensorTemplate(hass, entity_conf, unique_id))
    async_add_entities(entities)
def async_create_preview_sensor(
    hass: HomeAssistant, name: str, config: dict[str, Any]
) -> SensorTemplate:
    """Create a preview sensor."""
    merged = config | {CONF_NAME: name}
    return SensorTemplate(hass, SENSOR_SCHEMA(merged), None)
def make_template_entity_common_schema(default_name: str) -> vol.Schema:
    """Return a schema with default name."""
    extra_fields = {
        vol.Optional(CONF_ATTRIBUTES): vol.Schema({cv.string: cv.template}),
        vol.Optional(CONF_AVAILABILITY): cv.template,
    }
    base_schema = make_template_entity_base_schema(default_name).schema
    return vol.Schema(extra_fields).extend(base_schema)
def rewrite_common_legacy_to_modern_conf(
    entity_cfg: dict[str, Any], extra_legacy_fields: dict[str, str] | None = None
) -> dict[str, Any]:
    """Rewrite legacy config.

    Renames legacy keys to their modern equivalents (never overwriting an
    existing modern key) and wraps string values in Template objects.
    """
    cfg = {**entity_cfg}
    if extra_legacy_fields is None:
        extra_legacy_fields = {}

    for from_key, to_key in itertools.chain(
        LEGACY_FIELDS.items(), extra_legacy_fields.items()
    ):
        if from_key not in cfg or to_key in cfg:
            continue
        value = cfg.pop(from_key)
        if isinstance(value, str):
            value = Template(value)
        cfg[to_key] = value

    # The name may still be a plain string; wrap it as a template too.
    name = cfg.get(CONF_NAME)
    if isinstance(name, str):
        cfg[CONF_NAME] = Template(name)

    return cfg
def get_model_detection_function(model):
    """Get a tf.function for detection.

    Wraps the model's preprocess -> predict -> postprocess pipeline in a
    compiled tf.function for faster repeated inference.
    """

    @tf.function
    def detect_fn(image):
        """Detect objects in image."""
        image, shapes = model.preprocess(image)
        prediction_dict = model.predict(image, shapes)
        return model.postprocess(prediction_dict, shapes)

    return detect_fn
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the TensorFlow image processing platform.

    Validates the on-disk model layout, imports the (user-installed)
    Object Detection API from the model directory, and defers the heavy
    model load until Home Assistant has finished starting.
    """
    model_config = config[CONF_MODEL]
    model_dir = model_config.get(CONF_MODEL_DIR) or hass.config.path("tensorflow")
    labels = model_config.get(CONF_LABELS) or hass.config.path(
        "tensorflow", "object_detection", "data", "mscoco_label_map.pbtxt"
    )
    checkpoint = os.path.join(model_config[CONF_GRAPH], "checkpoint")
    pipeline_config = os.path.join(model_config[CONF_GRAPH], "pipeline.config")

    # Make sure locations exist
    if (
        not os.path.isdir(model_dir)
        or not os.path.isdir(checkpoint)
        or not os.path.exists(pipeline_config)
        or not os.path.exists(labels)
    ):
        _LOGGER.error("Unable to locate tensorflow model or label map")
        return

    # append custom model path to sys.path
    sys.path.append(model_dir)

    try:
        # Verify that the TensorFlow Object Detection API is pre-installed
        # These imports shouldn't be moved to the top, because they depend on code from the model_dir.
        # (The model_dir is created during the manual setup process. See integration docs.)
        # pylint: disable=import-outside-toplevel
        from object_detection.builders import model_builder
        from object_detection.utils import config_util, label_map_util
    except ImportError:
        _LOGGER.error(
            "No TensorFlow Object Detection library found! Install or compile "
            "for your system following instructions here: "
            "https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/tf2.md#installation"
        )
        return

    try:
        # Display warning that PIL will be used if no OpenCV is found.
        import cv2  # noqa: F401 pylint: disable=import-outside-toplevel
    except ImportError:
        _LOGGER.warning(
            "No OpenCV library found. TensorFlow will process image with "
            "PIL at reduced resolution"
        )

    # Model slot is filled asynchronously once HA has started (see below).
    hass.data[DOMAIN] = {CONF_MODEL: None}

    def tensorflow_hass_start(_event):
        """Set up TensorFlow model on hass start."""
        start = time.perf_counter()

        # Load pipeline config and build a detection model
        pipeline_configs = config_util.get_configs_from_pipeline_file(pipeline_config)
        detection_model = model_builder.build(
            model_config=pipeline_configs["model"], is_training=False
        )

        # Restore checkpoint
        ckpt = tf.compat.v2.train.Checkpoint(model=detection_model)
        ckpt.restore(os.path.join(checkpoint, "ckpt-0")).expect_partial()
        _LOGGER.debug(
            "Model checkpoint restore took %d seconds", time.perf_counter() - start
        )

        model = get_model_detection_function(detection_model)

        # Preload model cache with empty image tensor
        inp = np.zeros([2160, 3840, 3], dtype=np.uint8)
        # The input needs to be a tensor, convert it using `tf.convert_to_tensor`.
        input_tensor = tf.convert_to_tensor(inp, dtype=tf.float32)
        # The model expects a batch of images, so add an axis with `tf.newaxis`.
        input_tensor = input_tensor[tf.newaxis, ...]
        # Run inference
        model(input_tensor)
        _LOGGER.debug("Model load took %d seconds", time.perf_counter() - start)
        hass.data[DOMAIN][CONF_MODEL] = model

    hass.bus.listen_once(EVENT_HOMEASSISTANT_START, tensorflow_hass_start)

    category_index = label_map_util.create_category_index_from_labelmap(
        labels, use_display_name=True
    )

    add_entities(
        TensorFlowImageProcessor(
            hass,
            camera[CONF_ENTITY_ID],
            camera.get(CONF_NAME),
            category_index,
            config,
        )
        for camera in config[CONF_SOURCE]
    )
def get_poll_interval(entry: ConfigEntry) -> timedelta:
    """Return the configured poll interval, falling back to the default."""
    seconds = entry.options.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
    return timedelta(seconds=seconds)
def get_unique_id(serial_number: str, key: str) -> str:
    """Compose a stable per-device unique id of the form "<serial>-<key>"."""
    return "-".join((serial_number, key))
def handle_command():
    """Handle wake up and errors.

    Generator meant to wrap Teslemetry commands as a context manager:
    any TeslaFleetError raised in the guarded body is re-raised as a
    HomeAssistantError with the original exception chained as the cause.
    """
    # NOTE(review): no @contextlib.contextmanager decorator is visible here;
    # confirm this function is decorated (or wrapped) at its definition site.
    try:
        yield
    except TeslaFleetError as e:
        raise HomeAssistantError("Teslemetry command failed") from e
def minutes_to_datetime(value: StateType) -> datetime | None:
    """Convert relative minutes into absolute datetime.

    Returns now() + *value* minutes for positive numeric input; otherwise
    None (non-numeric, zero or negative values carry no usable deadline).
    """
    if isinstance(value, (int, float)) and value > 0:
        return dt_util.now() + timedelta(minutes=value)
    return None
def device_key_to_bluetooth_entity_key(
    device_key: DeviceKey,
) -> PassiveBluetoothEntityKey:
    """Convert a device key to an entity key.

    Maps the sensor library's (key, device_id) pair onto the passive
    Bluetooth entity key used by Home Assistant.
    """
    return PassiveBluetoothEntityKey(device_key.key, device_key.device_id)
def sensor_update_to_bluetooth_data_update(
    sensor_update: SensorUpdate,
) -> PassiveBluetoothDataUpdate:
    """Convert a sensor update to a bluetooth data update.

    Re-keys devices, descriptions, values and names from the parser's
    SensorUpdate into the passive-Bluetooth coordinator's update format.
    """
    return PassiveBluetoothDataUpdate(
        devices={
            device_id: sensor_device_info_to_hass_device_info(device_info)
            for device_id, device_info in sensor_update.devices.items()
        },
        entity_descriptions={
            device_key_to_bluetooth_entity_key(device_key): SENSOR_DESCRIPTIONS[
                (description.device_class, description.native_unit_of_measurement)
            ]
            # Entries lacking a device class or unit have no matching
            # description and are dropped.
            for device_key, description in sensor_update.entity_descriptions.items()
            if description.device_class and description.native_unit_of_measurement
        },
        entity_data={
            device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
            for device_key, sensor_values in sensor_update.entity_values.items()
        },
        entity_names={
            device_key_to_bluetooth_entity_key(device_key): sensor_values.name
            for device_key, sensor_values in sensor_update.entity_values.items()
        },
    )
def _device_key_to_bluetooth_entity_key(
    device_key: DeviceKey,
) -> PassiveBluetoothEntityKey:
    """Convert a device key to an entity key.

    Maps the sensor library's (key, device_id) pair onto the passive
    Bluetooth entity key used by Home Assistant.
    """
    return PassiveBluetoothEntityKey(device_key.key, device_key.device_id)
def sensor_update_to_bluetooth_data_update(
    sensor_update: SensorUpdate,
) -> PassiveBluetoothDataUpdate:
    """Convert a sensor update to a bluetooth data update.

    Translates the parser's SensorUpdate (devices, descriptions, values,
    names) into the coordinator's PassiveBluetoothDataUpdate shape.
    """
    return PassiveBluetoothDataUpdate(
        devices={
            device_id: sensor_device_info_to_hass_device_info(device_info)
            for device_id, device_info in sensor_update.devices.items()
        },
        entity_descriptions={
            _device_key_to_bluetooth_entity_key(device_key): SENSOR_DESCRIPTIONS[
                (description.device_class, description.native_unit_of_measurement)
            ]
            # Skip descriptions without a device class/unit pair; they have
            # no entry in SENSOR_DESCRIPTIONS.
            for device_key, description in sensor_update.entity_descriptions.items()
            if description.device_class and description.native_unit_of_measurement
        },
        entity_data={
            _device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
            for device_key, sensor_values in sensor_update.entity_values.items()
        },
        entity_names={
            _device_key_to_bluetooth_entity_key(device_key): sensor_values.name
            for device_key, sensor_values in sensor_update.entity_values.items()
        },
    )
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the thermoworks sensor.

    Logs (rather than raises) on authentication failure so a bad
    credential does not abort the whole platform setup.
    """
    email = config[CONF_EMAIL]
    password = config[CONF_PASSWORD]
    monitored_variables = config[CONF_MONITORED_CONDITIONS]
    excluded = config[CONF_EXCLUDE]

    try:
        mgr = thermoworks_smoke.initialize_app(email, password, True, excluded)
    except HTTPError as error:
        msg = f"{error.strerror}"
        # The backend reports bad credentials via these marker strings.
        if "EMAIL_NOT_FOUND" in msg or "INVALID_PASSWORD" in msg:
            _LOGGER.error("Invalid email and password combination")
        else:
            _LOGGER.error(msg)
    else:
        # One sensor per (device serial, monitored variable) pair.
        add_entities(
            (
                ThermoworksSmokeSensor(variable, serial, mgr)
                for serial in mgr.serials()
                for variable in monitored_variables
            ),
            True,
        )
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the Thingspeak environment.

    Verifies channel access once at setup, then forwards numeric state
    changes of the configured entity to the channel's field1.
    """
    conf = config[DOMAIN]
    api_key = conf.get(CONF_API_KEY)
    channel_id = conf.get(CONF_ID)
    entity = conf.get(CONF_WHITELIST)

    try:
        # Probe the channel up front so bad credentials fail setup early.
        channel = thingspeak.Channel(channel_id, api_key=api_key, timeout=TIMEOUT)
        channel.get()
    except RequestException:
        _LOGGER.error(
            "Error while accessing the ThingSpeak channel. "
            "Please check that the channel exists and your API key is correct"
        )
        return False

    def thingspeak_listener(entity_id, old_state, new_state):
        """Listen for new events and send them to Thingspeak."""
        # Ignore states that carry no usable reading.
        if new_state is None or new_state.state in (
            STATE_UNKNOWN,
            "",
            STATE_UNAVAILABLE,
        ):
            return
        try:
            if new_state.entity_id != entity:
                return
            _state = state_helper.state_as_number(new_state)
        except ValueError:
            # Non-numeric state; nothing to publish.
            return
        try:
            channel.update({"field1": _state})
        except RequestException:
            _LOGGER.error("Error while sending value '%s' to Thingspeak", _state)

    # NOTE(review): event.track_state_change is a legacy helper; confirm it
    # still exists in the Home Assistant version this targets.
    event.track_state_change(hass, entity, thingspeak_listener)
    return True
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the ThinkingCleaner platform.

    Uses the configured host when given, otherwise falls back to network
    discovery. All sensors share one throttled update function.
    """
    if host := config.get(CONF_HOST):
        devices = [ThinkingCleaner(host, "unknown")]
    else:
        discovery = Discovery()
        devices = discovery.discover()

    @util.Throttle(MIN_TIME_BETWEEN_SCANS, MIN_TIME_BETWEEN_FORCED_SCANS)
    def update_devices():
        """Update all devices."""
        for device_object in devices:
            device_object.update()

    # One sensor entity per (device, sensor description) pair.
    entities = [
        ThinkingCleanerSensor(device, update_devices, description)
        for device in devices
        for description in SENSOR_TYPES
    ]
    add_entities(entities)
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the ThinkingCleaner platform.

    Uses the configured host when given, otherwise falls back to network
    discovery. All switches share one throttled update function.
    """
    if host := config.get(CONF_HOST):
        devices = [ThinkingCleaner(host, "unknown")]
    else:
        discovery = Discovery()
        devices = discovery.discover()

    @util.Throttle(MIN_TIME_BETWEEN_SCANS, MIN_TIME_BETWEEN_FORCED_SCANS)
    def update_devices():
        """Update all devices."""
        for device_object in devices:
            device_object.update()

    # One switch entity per (device, switch description) pair.
    entities = [
        ThinkingCleanerSwitch(device, update_devices, description)
        for device in devices
        for description in SWITCH_TYPES
    ]
    add_entities(entities)
def get_scanner(hass: HomeAssistant, config: ConfigType) -> ThomsonDeviceScanner | None:
    """Validate the configuration and return a THOMSON scanner.

    Returns None when the scanner's initial connection failed so the
    device_tracker platform is not loaded with a broken scanner.
    """
    scanner = ThomsonDeviceScanner(config[DOMAIN])
    return scanner if scanner.success_init else None
def _get_routes_and_neighbors():
    """Get the routes and neighbours from pyroute2.

    Intended to run in an executor: importing NDB is slow, and the NDB
    queries themselves are blocking.
    """
    # Import in the executor since import NDB can take a while
    from pyroute2 import (  # pylint: disable=no-name-in-module, import-outside-toplevel
        NDB,
    )

    with NDB() as ndb:
        routes, reverse_routes = _get_possible_thread_routes(ndb)
        neighbours = _get_neighbours(ndb)

    return routes, reverse_routes, neighbours
def async_discovery_data_from_service(
    service: AsyncServiceInfo,
    ext_addr: bytes,
    ext_pan_id: bytes,
) -> ThreadRouterDiscoveryData:
    """Get a ThreadRouterDiscoveryData from an AsyncServiceInfo.

    Decodes the mDNS TXT properties of a meshcop service record; text
    fields that are not valid UTF-8 are treated as absent (None).
    """

    def try_decode(value: bytes | None) -> str | None:
        """Try decoding UTF-8."""
        if value is None:
            return None
        try:
            return value.decode()
        except UnicodeDecodeError:
            return None

    service_properties = service.properties
    border_agent_id = service_properties.get(b"id")
    model_name = try_decode(service_properties.get(b"mn"))
    network_name = try_decode(service_properties.get(b"nn"))
    server = service.server
    thread_version = try_decode(service_properties.get(b"tv"))
    vendor_name = try_decode(service_properties.get(b"vn"))
    unconfigured = None
    brand = KNOWN_BRANDS.get(vendor_name)
    if brand == "homeassistant":
        # Attempt to detect incomplete configuration
        if (state_bitmap_b := service_properties.get(b"sb")) is not None:
            try:
                state_bitmap = StateBitmap.from_bytes(state_bitmap_b)
                if not state_bitmap.is_active:
                    unconfigured = True
            except ValueError:
                _LOGGER.debug("Failed to decode state bitmap in service %s", service)
        # A missing active timestamp ("at") also indicates an unconfigured
        # border router.
        if service_properties.get(b"at") is None:
            unconfigured = True
    return ThreadRouterDiscoveryData(
        addresses=service.parsed_addresses(),
        border_agent_id=border_agent_id.hex() if border_agent_id is not None else None,
        brand=brand,
        extended_address=ext_addr.hex(),
        extended_pan_id=ext_pan_id.hex(),
        model_name=model_name,
        network_name=network_name,
        server=server,
        thread_version=thread_version,
        unconfigured=unconfigured,
        vendor_name=vendor_name,
    )
def async_read_zeroconf_cache(aiozc: AsyncZeroconf) -> list[ThreadRouterDiscoveryData]:
    """Return all meshcop records already in the zeroconf cache.

    Only records that are fully cached and carry both the extended address
    ("xa") and extended PAN id ("xp") TXT properties are returned.
    """
    results = []
    records = aiozc.zeroconf.cache.async_all_by_details(THREAD_TYPE, TYPE_PTR, CLASS_IN)
    for record in records:
        record = cast(DNSPointer, record)
        try:
            info = AsyncServiceInfo(THREAD_TYPE, record.alias)
        except BadTypeInNameException as ex:
            _LOGGER.debug(
                "Ignoring record with bad type in name: %s: %s", record.alias, ex
            )
            continue
        if not info.load_from_cache(aiozc.zeroconf):
            # data is not fully in the cache, so ignore for now
            continue
        service_properties = info.properties
        if not (xa := service_properties.get(b"xa")):
            _LOGGER.debug("Ignoring record without xa %s", info)
            continue
        if not (xp := service_properties.get(b"xp")):
            _LOGGER.debug("Ignoring record without xp %s", info)
            continue
        results.append(async_discovery_data_from_service(info, xa, xp))
    return results
def async_setup(hass: HomeAssistant) -> None:
    """Set up the sensor websocket API.

    Registers all Thread dataset/router websocket commands.
    """
    websocket_api.async_register_command(hass, ws_add_dataset)
    websocket_api.async_register_command(hass, ws_delete_dataset)
    websocket_api.async_register_command(hass, ws_discover_routers)
    websocket_api.async_register_command(hass, ws_get_dataset)
    websocket_api.async_register_command(hass, ws_list_datasets)
    websocket_api.async_register_command(hass, ws_set_preferred_border_agent)
    websocket_api.async_register_command(hass, ws_set_preferred_dataset)
def _threshold_type(lower: float | None, upper: float | None) -> str:
    """Return the type of threshold this sensor represents.

    Both bounds set -> range; only lower set -> lower; otherwise upper
    (also the result when neither bound is configured).
    """
    has_lower = lower is not None
    has_upper = upper is not None
    if has_lower and has_upper:
        return TYPE_RANGE
    return TYPE_LOWER if has_lower else TYPE_UPPER
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Tikteck platform.

    Builds one light per configured device address and adds only those
    whose connection parameters validate.
    """
    valid_lights = []
    for address, device_config in config[CONF_DEVICES].items():
        candidate = TikteckLight(
            {
                "name": device_config[CONF_NAME],
                "password": device_config[CONF_PASSWORD],
                "address": address,
            }
        )
        if candidate.is_valid:
            valid_lights.append(candidate)
    add_entities(valid_lights)
def _device_key_to_bluetooth_entity_key(
    device_key: DeviceKey,
) -> PassiveBluetoothEntityKey:
    """Convert a device key to an entity key.

    Maps the sensor library's (key, device_id) pair onto the passive
    Bluetooth entity key used by Home Assistant.
    """
    return PassiveBluetoothEntityKey(device_key.key, device_key.device_id)
def sensor_update_to_bluetooth_data_update(
    sensor_update: SensorUpdate,
) -> PassiveBluetoothDataUpdate:
    """Convert a sensor update to a bluetooth data update.

    Re-keys the parser's SensorUpdate into the passive-Bluetooth
    coordinator's update structure.
    """
    return PassiveBluetoothDataUpdate(
        devices={
            device_id: sensor_device_info_to_hass_device_info(device_info)
            for device_id, device_info in sensor_update.devices.items()
        },
        entity_descriptions={
            _device_key_to_bluetooth_entity_key(device_key): SENSOR_DESCRIPTIONS[
                (description.device_class, description.native_unit_of_measurement)
            ]
            # Descriptions without class/unit have no SENSOR_DESCRIPTIONS
            # entry and are skipped.
            for device_key, description in sensor_update.entity_descriptions.items()
            if description.device_class and description.native_unit_of_measurement
        },
        entity_data={
            _device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
            for device_key, sensor_values in sensor_update.entity_values.items()
        },
        entity_names={
            _device_key_to_bluetooth_entity_key(device_key): sensor_values.name
            for device_key, sensor_values in sensor_update.entity_values.items()
        },
    )
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the sensors.

    Creates one TMB iBus sensor per configured line/stop pair, using the
    optional per-stop name in the entity name when provided.
    """
    ibus_client = IBus(config[CONF_APP_ID], config[CONF_APP_KEY])

    sensors = []
    for line_stop in config[CONF_BUS_STOPS]:
        line = line_stop[CONF_LINE]
        stop = line_stop[CONF_BUS_STOP]
        if line_stop.get(CONF_NAME):
            name = f"{line} - {line_stop[CONF_NAME]} ({stop})"
        else:
            name = f"{line} - {stop}"
        sensors.append(TMBSensor(ibus_client, stop, line, name))

    # True -> force an initial update before the entities are added.
    add_entities(sensors, True)
def _is_sun_event(sun_event: time | SunEventType) -> TypeGuard[SunEventType]:
    """Return true if event is sun event not time.

    Acts as a TypeGuard so callers can narrow time | SunEventType unions.
    """
    return sun_event in (SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET)
def _validate_supported_features(
    supported_features: int | None, call_data: dict[str, Any]
) -> None:
    """Validate service call fields against entity supported features.

    Raises ServiceValidationError for any field present in *call_data*
    whose required feature bit is missing from *supported_features*.
    """
    for desc in TODO_ITEM_FIELDS:
        if desc.service_field not in call_data:
            continue
        if not supported_features or not supported_features & desc.required_feature:
            raise ServiceValidationError(
                translation_domain=DOMAIN,
                translation_key="update_field_not_supported",
                translation_placeholders={"service_field": desc.service_field},
            )
Convert CalendarEvent dataclass items to dictionary of attributes. | def _api_items_factory(obj: Iterable[tuple[str, Any]]) -> dict[str, str]:
"""Convert CalendarEvent dataclass items to dictionary of attributes."""
result: dict[str, str] = {}
for name, value in obj:
if value is None:
continue
if isinstance(value, (datetime.date, datetime.datetime)):
result[name] = value.isoformat()
else:
result[name] = str(value)
return result |
def _find_by_uid_or_summary(
    value: str, items: list[TodoItem] | None
) -> TodoItem | None:
    """Find a To-do List item by uid or summary name.

    Returns the first match, or None when *items* is empty/None or no
    item's uid or summary equals *value*.
    """
    return next(
        (item for item in items or () if value in (item.uid, item.summary)),
        None,
    )
def async_register_services(
    hass: HomeAssistant, coordinator: TodoistCoordinator
) -> None:
    """Register services.

    Registers the Todoist new_task service once; subsequent calls are
    no-ops so multiple config entries don't double-register.
    """
    if hass.services.has_service(DOMAIN, SERVICE_NEW_TASK):
        return
    session = async_get_clientsession(hass)

    async def handle_new_task(call: ServiceCall) -> None:
        """Call when a user creates a new Todoist Task from Home Assistant."""
        # Resolve the target project by case-insensitive name match.
        project_name = call.data[PROJECT_NAME].lower()
        projects = await coordinator.async_get_projects()
        project_id: str | None = None
        for project in projects:
            if project_name == project.name.lower():
                project_id = project.id
        if project_id is None:
            raise HomeAssistantError(f"Invalid project name '{project_name}'")

        # Create the task
        content = call.data[CONTENT]
        data: dict[str, Any] = {"project_id": project_id}

        if task_labels := call.data.get(LABELS):
            data["labels"] = task_labels

        if ASSIGNEE in call.data:
            # Resolve the assignee among the project's collaborators.
            collaborators = await coordinator.api.get_collaborators(project_id)
            collaborator_id_lookup = {
                collab.name.lower(): collab.id for collab in collaborators
            }
            task_assignee = call.data[ASSIGNEE].lower()
            if task_assignee in collaborator_id_lookup:
                data["assignee_id"] = collaborator_id_lookup[task_assignee]
            else:
                raise ValueError(
                    f"User is not part of the shared project. user: {task_assignee}"
                )

        if PRIORITY in call.data:
            data["priority"] = call.data[PRIORITY]

        if DUE_DATE_STRING in call.data:
            data["due_string"] = call.data[DUE_DATE_STRING]

        if DUE_DATE_LANG in call.data:
            data["due_lang"] = call.data[DUE_DATE_LANG]

        if DUE_DATE in call.data:
            # Accept either a full datetime or a bare date string.
            due_date = dt_util.parse_datetime(call.data[DUE_DATE])
            if due_date is None:
                due = dt_util.parse_date(call.data[DUE_DATE])
                if due is None:
                    raise ValueError(f"Invalid due_date: {call.data[DUE_DATE]}")
                due_date = datetime(due.year, due.month, due.day)
            # Format it in the manner Todoist expects
            due_date = dt_util.as_utc(due_date)
            date_format = "%Y-%m-%dT%H:%M:%S"
            data["due_datetime"] = datetime.strftime(due_date, date_format)

        api_task = await coordinator.api.add_task(content, **data)

        # @NOTE: The rest-api doesn't support reminders, this works manually using
        # the sync api, in order to keep functional parity with the component.
        # https://developer.todoist.com/sync/v9/#reminders
        sync_url = get_sync_url("sync")

        _reminder_due: dict = {}
        if REMINDER_DATE_STRING in call.data:
            _reminder_due["string"] = call.data[REMINDER_DATE_STRING]

        if REMINDER_DATE_LANG in call.data:
            _reminder_due["lang"] = call.data[REMINDER_DATE_LANG]

        if REMINDER_DATE in call.data:
            due_date = dt_util.parse_datetime(call.data[REMINDER_DATE])
            if due_date is None:
                due = dt_util.parse_date(call.data[REMINDER_DATE])
                if due is None:
                    raise ValueError(
                        f"Invalid reminder_date: {call.data[REMINDER_DATE]}"
                    )
                due_date = datetime(due.year, due.month, due.day)
            # Format it in the manner Todoist expects
            due_date = dt_util.as_utc(due_date)
            date_format = "%Y-%m-%dT%H:%M:%S"
            _reminder_due["date"] = datetime.strftime(due_date, date_format)

        async def add_reminder(reminder_due: dict):
            # Reminders require the raw sync API; build a single
            # reminder_add command for the freshly created task.
            reminder_data = {
                "commands": [
                    {
                        "type": "reminder_add",
                        "temp_id": str(uuid.uuid1()),
                        "uuid": str(uuid.uuid1()),
                        "args": {"item_id": api_task.id, "due": reminder_due},
                    }
                ]
            }
            headers = create_headers(token=coordinator.token, with_content=True)
            return await session.post(sync_url, headers=headers, json=reminder_data)

        if _reminder_due:
            await add_reminder(_reminder_due)

        _LOGGER.debug("Created Todoist task: %s", call.data[CONTENT])

    hass.services.async_register(
        DOMAIN, SERVICE_NEW_TASK, handle_new_task, schema=NEW_TASK_SERVICE_SCHEMA
    )
def get_start(due: Due) -> datetime | date | None:
    """Return the task due date as a start date or date time.

    Prefers the full datetime (converted to local time) over the bare
    date; returns None when neither parses.
    """
    if due.datetime:
        start = dt_util.parse_datetime(due.datetime)
        if not start:
            return None
        return dt_util.as_local(start)
    if due.date:
        return dt_util.parse_date(due.date)
    return None
def _task_api_data(item: TodoItem, api_data: Task | None = None) -> dict[str, Any]:
    """Convert a TodoItem to the set of add or update arguments.

    *api_data* is the existing Todoist task (when updating) and is used to
    preserve its recurrence string.
    """
    item_data: dict[str, Any] = {
        "content": item.summary,
        # Description needs to be empty string to be cleared
        "description": item.description or "",
    }
    if due := item.due:
        if isinstance(due, datetime.datetime):
            item_data["due_datetime"] = due.isoformat()
        else:
            item_data["due_date"] = due.isoformat()
        # In order to not lose any recurrence metadata for the task, we need to
        # ensure that we send the `due_string` param if the task has it set.
        # NOTE: It's ok to send stale data for non-recurring tasks. Any provided
        # date/datetime will override this string.
        if api_data and api_data.due:
            item_data["due_string"] = api_data.due.string
    else:
        # Special flag "no date" clears the due date/datetime.
        # See https://developer.todoist.com/rest/v2/#update-a-task for more.
        item_data["due_string"] = "no date"
    return item_data
def get_scanner(hass: HomeAssistant, config: ConfigType) -> TomatoDeviceScanner:
    """Validate the configuration and returns a Tomato scanner."""
    return TomatoDeviceScanner(config[DOMAIN])
def _get_config_schema(
    hass: HomeAssistant,
    source: str | None,
    input_dict: dict[str, Any] | None = None,
) -> vol.Schema:
    """Return schema defaults for init step based on user input/config dict.

    Retain info already provided for future form views by setting them as
    defaults in schema.
    """
    if input_dict is None:
        input_dict = {}

    api_key_schema = {
        vol.Required(CONF_API_KEY, default=input_dict.get(CONF_API_KEY)): str,
    }
    # Fall back to the Home Assistant home coordinates when no location
    # was previously entered.
    default_location = input_dict.get(
        CONF_LOCATION,
        {
            CONF_LATITUDE: hass.config.latitude,
            CONF_LONGITUDE: hass.config.longitude,
        },
    )
    return vol.Schema(
        {
            **api_key_schema,
            vol.Required(
                CONF_LOCATION,
                default=default_location,
            ): LocationSelector(LocationSelectorConfig(radius=False)),
        },
    )
def _get_unique_id(hass: HomeAssistant, input_dict: dict[str, Any]):
    """Return unique ID from config data.

    Combines API key and coordinates so one key may serve multiple
    locations without colliding.
    """
    return (
        f"{input_dict[CONF_API_KEY]}"
        f"_{input_dict[CONF_LOCATION][CONF_LATITUDE]}"
        f"_{input_dict[CONF_LOCATION][CONF_LONGITUDE]}"
    )
def convert_ppb_to_ugm3(molecular_weight: float) -> Callable[[float], float]:
    """Return function to convert ppb to ug/m^3."""

    # 24.45 L/mol is the molar volume used for the ppb -> ug/m^3 conversion.
    def _converter(ppb: float) -> float:
        return (ppb * molecular_weight) / 24.45

    return _converter
Handle conversion of a value based on conversion type. | def handle_conversion(
value: float, conversion: Callable[[float], float] | float
) -> float:
"""Handle conversion of a value based on conversion type."""
if callable(conversion):
return round(conversion(float(value)), 2)
return round(float(value) * conversion, 2) |
Calculate unique ID. | def _calculate_unique_id(config_entry_unique_id: str | None, forecast_type: str) -> str:
"""Calculate unique ID."""
return f"{config_entry_unique_id}_{forecast_type}" |
def async_get_entries_by_api_key(
    hass: HomeAssistant, api_key: str, exclude_entry: ConfigEntry | None = None
) -> list[ConfigEntry]:
    """Get all entries for a given API key.

    Optionally excludes one entry (e.g. the one currently being set up).
    """
    return [
        entry
        for entry in hass.config_entries.async_entries(DOMAIN)
        if entry.data[CONF_API_KEY] == api_key
        and (exclude_entry is None or exclude_entry != entry)
    ]
def async_set_update_interval(
    hass: HomeAssistant, api: TomorrowioV4, exclude_entry: ConfigEntry | None = None
) -> timedelta:
    """Calculate update_interval.

    Scales the polling interval by the number of config entries sharing
    this API key so the daily request quota is never exceeded.
    """
    # We check how many Tomorrow.io configured instances are using the same API key and
    # calculate interval to not exceed allowed numbers of requests. Divide 90% of
    # max_requests by the number of API calls because we want a buffer in the
    # number of API calls left at the end of the day.
    entries = async_get_entries_by_api_key(hass, api.api_key, exclude_entry)
    minutes = ceil(
        (24 * 60 * len(entries) * api.num_api_requests)
        / (api.max_requests_per_day * 0.9)
    )
    LOGGER.debug(
        (
            "Number of config entries: %s\n"
            "Number of API Requests per call: %s\n"
            "Max requests per day: %s\n"
            "Update interval: %s minutes"
        ),
        len(entries),
        api.num_api_requests,
        api.max_requests_per_day,
        minutes,
    )
    return timedelta(minutes=minutes)
def toon_exception_handler(
    func: Callable[Concatenate[_ToonEntityT, _P], Coroutine[Any, Any, None]],
) -> Callable[Concatenate[_ToonEntityT, _P], Coroutine[Any, Any, None]]:
    """Decorate Toon calls to handle Toon exceptions.

    A decorator that wraps the passed in function, catches Toon errors,
    and handles the availability of the device in the data coordinator.
    """

    async def handler(self: _ToonEntityT, *args: _P.args, **kwargs: _P.kwargs) -> None:
        try:
            await func(self, *args, **kwargs)
            # Success: propagate the fresh state to all entities.
            self.coordinator.async_update_listeners()
        except ToonConnectionError as error:
            _LOGGER.error("Error communicating with API: %s", error)
            # Mark the coordinator unavailable so entities reflect the outage.
            self.coordinator.last_update_success = False
            self.coordinator.async_update_listeners()
        except ToonError as error:
            _LOGGER.error("Invalid response from API: %s", error)

    return handler
def register_oauth2_implementations(
    hass: HomeAssistant, client_id: str, client_secret: str
) -> None:
    """Register Toon OAuth2 implementations.

    Registers one OAuth2 implementation per supported Toon tenant
    (Eneco, Electrabel, Viesgo) with the shared client credentials.
    """
    config_flow.ToonFlowHandler.async_register_implementation(
        hass,
        ToonLocalOAuth2Implementation(
            hass,
            client_id=client_id,
            client_secret=client_secret,
            name="Eneco Toon",
            tenant_id="eneco",
            issuer="identity.toon.eu",
        ),
    )
    config_flow.ToonFlowHandler.async_register_implementation(
        hass,
        ToonLocalOAuth2Implementation(
            hass,
            client_id=client_id,
            client_secret=client_secret,
            name="Engie Electrabel Boxx",
            tenant_id="electrabel",
            issuer="identity.toon.eu",
        ),
    )
    config_flow.ToonFlowHandler.async_register_implementation(
        hass,
        ToonLocalOAuth2Implementation(
            hass,
            client_id=client_id,
            client_secret=client_secret,
            name="Viesgo",
            tenant_id="viesgo",
        ),
    )
def convert_pid(value):
    """Convert pid from hex string to integer."""
    # int() with an explicit base of 16 also accepts an "0x"/"0X" prefix.
    hex_base = 16
    return int(value, hex_base)
def get_security_zone_device_class(zone: TotalConnectZone) -> BinarySensorDeviceClass:
    """Return the device class of a TotalConnect security zone.

    Checks zone types in priority order; anything unrecognized is treated
    as a door contact.
    """
    if zone.is_type_fire():
        return BinarySensorDeviceClass.SMOKE
    if zone.is_type_carbon_monoxide():
        return BinarySensorDeviceClass.GAS
    if zone.is_type_motion():
        return BinarySensorDeviceClass.MOTION
    if zone.is_type_medical():
        return BinarySensorDeviceClass.SAFETY
    if zone.is_type_temperature():
        return BinarySensorDeviceClass.PROBLEM
    return BinarySensorDeviceClass.DOOR
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Touchline devices.

    Queries the controller once for its device count, then creates one
    climate entity per device id.
    """
    host = config[CONF_HOST]
    py_touchline = PyTouchline()
    number_of_devices = int(py_touchline.get_number_of_devices(host))
    # True -> perform an initial update before adding the entities.
    add_entities(
        (Touchline(PyTouchline(device_id)) for device_id in range(number_of_devices)),
        True,
    )
def async_refresh_after(
    func: Callable[Concatenate[_T, _P], Awaitable[None]],
) -> Callable[Concatenate[_T, _P], Coroutine[Any, Any, None]]:
    """Define a wrapper to raise HA errors and refresh after.

    Translates TP-Link library exceptions into HomeAssistantError (with
    translation keys) and triggers a coordinator refresh on success.
    """
    # NOTE(review): the wrapper is not decorated with functools.wraps, so the
    # wrapped method's name/docstring are lost — confirm whether that matters
    # to callers before changing it.

    async def _async_wrap(self: _T, *args: _P.args, **kwargs: _P.kwargs) -> None:
        try:
            await func(self, *args, **kwargs)
        except AuthenticationException as ex:
            # Credentials went stale: kick off a reauth flow before raising.
            self.coordinator.config_entry.async_start_reauth(self.hass)
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="device_authentication",
                translation_placeholders={
                    "func": func.__name__,
                    "exc": str(ex),
                },
            ) from ex
        except TimeoutException as ex:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="device_timeout",
                translation_placeholders={
                    "func": func.__name__,
                    "exc": str(ex),
                },
            ) from ex
        except SmartDeviceException as ex:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="device_error",
                translation_placeholders={
                    "func": func.__name__,
                    "exc": str(ex),
                },
            ) from ex
        # Only reached when the call succeeded; pick up the new device state.
        await self.coordinator.async_request_refresh()

    return _async_wrap
def async_emeter_from_device(
    device: SmartDevice, description: TPLinkSensorEntityDescription
) -> float | None:
    """Map a sensor key to the device attribute."""
    attr = description.emeter_attr
    if attr:
        # Read the live emeter value named by the description.
        value = getattr(device.emeter_realtime, attr)
        if value is None:
            return None
        return round(cast(float, value), description.precision)
    # No emeter attribute -> this is the today's-energy (kWh) sensor.
    today = device.emeter_today
    if today is not None:
        return round(cast(float, today), description.precision)
    # Today's consumption is missing when the device was off all day.
    # Bulbs never report it, so filter them out; plugs report 0.0.
    if device.is_bulb:
        return None
    return 0.0
def _async_sensors_for_device(
    device: SmartDevice,
    coordinator: TPLinkDataUpdateCoordinator,
    has_parent: bool = False,
) -> list[SmartPlugSensor]:
    """Generate the sensors for the device."""
    sensors: list[SmartPlugSensor] = []
    for description in ENERGY_SENSORS:
        # Skip sensors the device cannot currently provide a value for.
        if async_emeter_from_device(device, description) is None:
            continue
        sensors.append(SmartPlugSensor(device, coordinator, description, has_parent))
    return sensors
def create_async_tplink_clientsession(hass: HomeAssistant) -> ClientSession:
    """Return aiohttp clientsession with cookie jar configured."""
    # Session is created with TLS verification disabled and the
    # integration-specific cookie jar attached.
    jar = get_cookie_jar()
    return async_create_clientsession(hass, verify_ssl=False, cookie_jar=jar)
def async_trigger_discovery(
    hass: HomeAssistant,
    discovered_devices: dict[str, SmartDevice],
) -> None:
    """Trigger config flows for discovered devices."""
    for formatted_mac, device in discovered_devices.items():
        # Fall back to a short MAC-derived alias when the device has none.
        discovery_data = {
            CONF_ALIAS: device.alias or mac_alias(device.mac),
            CONF_HOST: device.host,
            CONF_MAC: formatted_mac,
            # Credentials themselves are excluded; only the hash is stored.
            CONF_DEVICE_CONFIG: device.config.to_dict(
                credentials_hash=device.credentials_hash,
                exclude_credentials=True,
            ),
        }
        discovery_flow.async_create_flow(
            hass,
            DOMAIN,
            context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY},
            data=discovery_data,
        )
def legacy_device_id(device: SmartDevice) -> str:
    """Convert the device id so it matches what was used in the original version."""
    device_id: str = device.device_id
    # python-kasa prefixes plug ids with the mac ("<mac>_<id>") while the old
    # pyHS100 library did not, so keep only the segment after the first "_".
    return device_id.split("_")[1] if "_" in device_id else device_id
def mac_alias(mac: str) -> str:
    """Convert a MAC address to a short address for the UI."""
    # Strip the colon separators, keep the final four hex digits, upper-case.
    compact = mac.replace(":", "")
    return compact[-4:].upper()
Get display name for a switch port. | def _get_switch_port_base_name(port: OmadaSwitchPortDetails) -> str:
"""Get display name for a switch port."""
if port.name == f"Port{port.port}":
return str(port.port)
return f"{port.port} ({port.name})" |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.