response | instruction |
---|---|
Register services for the component. | def register_services(hass: HomeAssistant) -> None:
"""Register services for the component."""
service_reset_schema = vol.Schema(
{
vol.Required(ATTR_GW_ID): vol.All(
cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
)
}
)
service_set_central_heating_ovrd_schema = vol.Schema(
{
vol.Required(ATTR_GW_ID): vol.All(
cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
),
vol.Required(ATTR_CH_OVRD): cv.boolean,
}
)
service_set_clock_schema = vol.Schema(
{
vol.Required(ATTR_GW_ID): vol.All(
cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
),
vol.Optional(ATTR_DATE, default=date.today): cv.date,
vol.Optional(ATTR_TIME, default=lambda: datetime.now().time()): cv.time,
}
)
service_set_control_setpoint_schema = vol.Schema(
{
vol.Required(ATTR_GW_ID): vol.All(
cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
),
vol.Required(ATTR_TEMPERATURE): vol.All(
vol.Coerce(float), vol.Range(min=0, max=90)
),
}
)
service_set_hot_water_setpoint_schema = service_set_control_setpoint_schema
service_set_hot_water_ovrd_schema = vol.Schema(
{
vol.Required(ATTR_GW_ID): vol.All(
cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
),
vol.Required(ATTR_DHW_OVRD): vol.Any(
vol.Equal("A"), vol.All(vol.Coerce(int), vol.Range(min=0, max=1))
),
}
)
service_set_gpio_mode_schema = vol.Schema(
vol.Any(
vol.Schema(
{
vol.Required(ATTR_GW_ID): vol.All(
cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
),
vol.Required(ATTR_ID): vol.Equal("A"),
vol.Required(ATTR_MODE): vol.All(
vol.Coerce(int), vol.Range(min=0, max=6)
),
}
),
vol.Schema(
{
vol.Required(ATTR_GW_ID): vol.All(
cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
),
vol.Required(ATTR_ID): vol.Equal("B"),
vol.Required(ATTR_MODE): vol.All(
vol.Coerce(int), vol.Range(min=0, max=7)
),
}
),
)
)
service_set_led_mode_schema = vol.Schema(
{
vol.Required(ATTR_GW_ID): vol.All(
cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
),
vol.Required(ATTR_ID): vol.In("ABCDEF"),
vol.Required(ATTR_MODE): vol.In("RXTBOFHWCEMP"),
}
)
service_set_max_mod_schema = vol.Schema(
{
vol.Required(ATTR_GW_ID): vol.All(
cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
),
vol.Required(ATTR_LEVEL): vol.All(
vol.Coerce(int), vol.Range(min=-1, max=100)
),
}
)
service_set_oat_schema = vol.Schema(
{
vol.Required(ATTR_GW_ID): vol.All(
cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
),
vol.Required(ATTR_TEMPERATURE): vol.All(
vol.Coerce(float), vol.Range(min=-40, max=99)
),
}
)
service_set_sb_temp_schema = vol.Schema(
{
vol.Required(ATTR_GW_ID): vol.All(
cv.string, vol.In(hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS])
),
vol.Required(ATTR_TEMPERATURE): vol.All(
vol.Coerce(float), vol.Range(min=0, max=30)
),
}
)
async def reset_gateway(call: ServiceCall) -> None:
"""Reset the OpenTherm Gateway."""
gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
mode_rst = gw_vars.OTGW_MODE_RESET
await gw_dev.gateway.set_mode(mode_rst)
hass.services.async_register(
DOMAIN, SERVICE_RESET_GATEWAY, reset_gateway, service_reset_schema
)
async def set_ch_ovrd(call: ServiceCall) -> None:
"""Set the central heating override on the OpenTherm Gateway."""
gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
await gw_dev.gateway.set_ch_enable_bit(1 if call.data[ATTR_CH_OVRD] else 0)
hass.services.async_register(
DOMAIN,
SERVICE_SET_CH_OVRD,
set_ch_ovrd,
service_set_central_heating_ovrd_schema,
)
async def set_control_setpoint(call: ServiceCall) -> None:
"""Set the control setpoint on the OpenTherm Gateway."""
gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
await gw_dev.gateway.set_control_setpoint(call.data[ATTR_TEMPERATURE])
hass.services.async_register(
DOMAIN,
SERVICE_SET_CONTROL_SETPOINT,
set_control_setpoint,
service_set_control_setpoint_schema,
)
async def set_dhw_ovrd(call: ServiceCall) -> None:
"""Set the domestic hot water override on the OpenTherm Gateway."""
gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
await gw_dev.gateway.set_hot_water_ovrd(call.data[ATTR_DHW_OVRD])
hass.services.async_register(
DOMAIN,
SERVICE_SET_HOT_WATER_OVRD,
set_dhw_ovrd,
service_set_hot_water_ovrd_schema,
)
async def set_dhw_setpoint(call: ServiceCall) -> None:
"""Set the domestic hot water setpoint on the OpenTherm Gateway."""
gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
await gw_dev.gateway.set_dhw_setpoint(call.data[ATTR_TEMPERATURE])
hass.services.async_register(
DOMAIN,
SERVICE_SET_HOT_WATER_SETPOINT,
set_dhw_setpoint,
service_set_hot_water_setpoint_schema,
)
async def set_device_clock(call: ServiceCall) -> None:
"""Set the clock on the OpenTherm Gateway."""
gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
attr_date = call.data[ATTR_DATE]
attr_time = call.data[ATTR_TIME]
await gw_dev.gateway.set_clock(datetime.combine(attr_date, attr_time))
hass.services.async_register(
DOMAIN, SERVICE_SET_CLOCK, set_device_clock, service_set_clock_schema
)
async def set_gpio_mode(call: ServiceCall) -> None:
"""Set the OpenTherm Gateway GPIO modes."""
gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
gpio_id = call.data[ATTR_ID]
gpio_mode = call.data[ATTR_MODE]
await gw_dev.gateway.set_gpio_mode(gpio_id, gpio_mode)
hass.services.async_register(
DOMAIN, SERVICE_SET_GPIO_MODE, set_gpio_mode, service_set_gpio_mode_schema
)
async def set_led_mode(call: ServiceCall) -> None:
"""Set the OpenTherm Gateway LED modes."""
gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
led_id = call.data[ATTR_ID]
led_mode = call.data[ATTR_MODE]
await gw_dev.gateway.set_led_mode(led_id, led_mode)
hass.services.async_register(
DOMAIN, SERVICE_SET_LED_MODE, set_led_mode, service_set_led_mode_schema
)
async def set_max_mod(call: ServiceCall) -> None:
"""Set the max modulation level."""
gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
level = call.data[ATTR_LEVEL]
if level == -1:
# Backend only clears setting on non-numeric values.
level = "-"
await gw_dev.gateway.set_max_relative_mod(level)
hass.services.async_register(
DOMAIN, SERVICE_SET_MAX_MOD, set_max_mod, service_set_max_mod_schema
)
async def set_outside_temp(call: ServiceCall) -> None:
"""Provide the outside temperature to the OpenTherm Gateway."""
gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
await gw_dev.gateway.set_outside_temp(call.data[ATTR_TEMPERATURE])
hass.services.async_register(
DOMAIN, SERVICE_SET_OAT, set_outside_temp, service_set_oat_schema
)
async def set_setback_temp(call: ServiceCall) -> None:
"""Set the OpenTherm Gateway SetBack temperature."""
gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]]
await gw_dev.gateway.set_setback_temp(call.data[ATTR_TEMPERATURE])
hass.services.async_register(
DOMAIN, SERVICE_SET_SB_TEMP, set_setback_temp, service_set_sb_temp_schema
) |
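A minimal validation sketch (not part of the integration) of how the voluptuous schemas above behave; "gateway_id"/"temperature" and the stand-in gateway registry are hypothetical simplifications of ATTR_GW_ID/ATTR_TEMPERATURE and hass.data.
import voluptuous as vol
gateways = {"gateway_1": object()}  # hypothetical gateway id
schema = vol.Schema(
    {
        vol.Required("gateway_id"): vol.All(str, vol.In(gateways)),
        vol.Required("temperature"): vol.All(
            vol.Coerce(float), vol.Range(min=0, max=90)
        ),
    }
)
# String input is coerced to float, unknown gateway ids are rejected.
assert schema({"gateway_id": "gateway_1", "temperature": "45"}) == {
    "gateway_id": "gateway_1",
    "temperature": 45.0,
}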
Return the UV label for the UV index. | def get_uv_label(uv_index: int) -> str:
"""Return the UV label for the UV index."""
label = next(
label for label in UV_LABEL_DEFINITIONS if uv_index >= label.minimum_index
)
return label.value |
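A self-contained sketch of the threshold lookup above; UvLabel and the threshold values are hypothetical stand-ins for UV_LABEL_DEFINITIONS, which must be ordered from highest minimum_index to lowest for the first match to win.
from typing import NamedTuple
class UvLabel(NamedTuple):
    value: str
    minimum_index: int
# Hypothetical thresholds, ordered high-to-low so the first match wins.
UV_LABEL_DEFINITIONS = (
    UvLabel("extreme", 11),
    UvLabel("very_high", 8),
    UvLabel("high", 6),
    UvLabel("moderate", 3),
    UvLabel("low", 0),
)
def get_uv_label(uv_index: int) -> str:
    return next(
        label for label in UV_LABEL_DEFINITIONS if uv_index >= label.minimum_index
    ).value
assert get_uv_label(7) == "high"
assert get_uv_label(0) == "low"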
Get OpenWeatherMap configuration and add language to it. | def _get_owm_config(language: str) -> dict[str, Any]:
"""Get OpenWeatherMap configuration and add language to it."""
config_dict = get_default_config()
config_dict["language"] = language
return config_dict |
Set up the opnsense component. | def setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the opnsense component."""
conf = config[DOMAIN]
url = conf[CONF_URL]
api_key = conf[CONF_API_KEY]
api_secret = conf[CONF_API_SECRET]
verify_ssl = conf[CONF_VERIFY_SSL]
tracker_interfaces = conf[CONF_TRACKER_INTERFACE]
interfaces_client = diagnostics.InterfaceClient(
api_key, api_secret, url, verify_ssl, timeout=20
)
try:
interfaces_client.get_arp()
except APIException:
_LOGGER.exception("Failure while connecting to OPNsense API endpoint")
return False
if tracker_interfaces:
# Verify that specified tracker interfaces are valid
netinsight_client = diagnostics.NetworkInsightClient(
api_key, api_secret, url, verify_ssl, timeout=20
)
interfaces = list(netinsight_client.get_interfaces().values())
for interface in tracker_interfaces:
if interface not in interfaces:
_LOGGER.error(
"Specified OPNsense tracker interface %s is not found", interface
)
return False
hass.data[OPNSENSE_DATA] = {
"interfaces": interfaces_client,
CONF_TRACKER_INTERFACE: tracker_interfaces,
}
load_platform(hass, Platform.DEVICE_TRACKER, DOMAIN, tracker_interfaces, config)
return True |
Set up the Opple light platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Opple light platform."""
name = config[CONF_NAME]
host = config[CONF_HOST]
entity = OppleLight(name, host)
add_entities([entity])
_LOGGER.debug("Init light %s %s", host, entity.unique_id) |
Convert a device key to an entity key. | def device_key_to_bluetooth_entity_key(
device_key: DeviceKey,
) -> PassiveBluetoothEntityKey:
"""Convert a device key to an entity key."""
return PassiveBluetoothEntityKey(device_key.key, device_key.device_id) |
Convert a sensor update to a bluetooth data update. | def sensor_update_to_bluetooth_data_update(
sensor_update: SensorUpdate,
) -> PassiveBluetoothDataUpdate:
"""Convert a sensor update to a bluetooth data update."""
return PassiveBluetoothDataUpdate(
devices={
device_id: sensor_device_info_to_hass_device_info(device_info)
for device_id, device_info in sensor_update.devices.items()
},
entity_descriptions={
device_key_to_bluetooth_entity_key(device_key): SENSOR_DESCRIPTIONS[
device_key.key
]
for device_key in sensor_update.entity_descriptions
},
entity_data={
device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
for device_key, sensor_values in sensor_update.entity_values.items()
},
entity_names={},
) |
Set up the sensor platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the sensor platform."""
meter_number = config[CONF_METER_NUMBER]
try:
meter = Meter(meter_number)
except MeterError:
_LOGGER.error("Unable to create Oru meter")
return
add_entities([CurrentEnergyUsageSensor(meter)], True)
_LOGGER.debug("Oru meter_number = %s", meter_number) |
Set up S20 switches. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities_callback: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up S20 switches."""
switch_data = {}
switches = []
switch_conf = config.get(CONF_SWITCHES, [config])
if config.get(CONF_DISCOVERY):
_LOGGER.info("Discovering S20 switches")
switch_data.update(discover())
for switch in switch_conf:
switch_data[switch.get(CONF_HOST)] = switch
for host, data in switch_data.items():
try:
switches.append(
S20Switch(data.get(CONF_NAME), S20(host, mac=data.get(CONF_MAC)))
)
_LOGGER.info("Initialized S20 at %s", host)
except S20Exception:
_LOGGER.error("S20 at %s couldn't be initialized", host)
add_entities_callback(switches) |
Set up the Osram Lightify lights. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Osram Lightify lights."""
host = config[CONF_HOST]
try:
bridge = Lightify(host, log_level=logging.NOTSET)
except OSError:
_LOGGER.exception("Error connecting to bridge %s", host)
return
setup_bridge(bridge, add_entities, config) |
Set up the Lightify bridge. | def setup_bridge(bridge, add_entities, config):
"""Set up the Lightify bridge."""
lights = {}
groups = {}
groups_last_updated = [0]
def update_lights():
"""Update the lights objects with the latest info from the bridge."""
try:
new_lights = bridge.update_all_light_status(
config[CONF_INTERVAL_LIGHTIFY_STATUS]
)
lights_changed = bridge.lights_changed()
except TimeoutError:
_LOGGER.error("Timeout during updating of lights")
return 0
except OSError:
_LOGGER.error("OSError during updating of lights")
return 0
if new_lights and config[CONF_ALLOW_LIGHTIFY_NODES]:
new_entities = []
for addr, light in new_lights.items():
if (
light.devicetype().name == "SENSOR"
and not config[CONF_ALLOW_LIGHTIFY_SENSORS]
) or (
light.devicetype().name == "SWITCH"
and not config[CONF_ALLOW_LIGHTIFY_SWITCHES]
):
continue
if addr not in lights:
osram_light = OsramLightifyLight(
light, update_lights, lights_changed
)
lights[addr] = osram_light
new_entities.append(osram_light)
else:
lights[addr].update_luminary(light)
add_entities(new_entities)
return lights_changed
def update_groups():
"""Update the groups objects with the latest info from the bridge."""
lights_changed = update_lights()
try:
bridge.update_scene_list(config[CONF_INTERVAL_LIGHTIFY_CONF])
new_groups = bridge.update_group_list(config[CONF_INTERVAL_LIGHTIFY_CONF])
groups_updated = bridge.groups_updated()
except TimeoutError:
_LOGGER.error("Timeout during updating of scenes/groups")
return 0
except OSError:
_LOGGER.error("OSError during updating of scenes/groups")
return 0
if new_groups:
new_groups = {group.idx(): group for group in new_groups.values()}
new_entities = []
for idx, group in new_groups.items():
if idx not in groups:
osram_group = OsramLightifyGroup(
group, update_groups, groups_updated
)
groups[idx] = osram_group
new_entities.append(osram_group)
else:
groups[idx].update_luminary(group)
add_entities(new_entities)
if groups_updated > groups_last_updated[0]:
groups_last_updated[0] = groups_updated
for idx, osram_group in groups.items():
if idx not in new_groups:
osram_group.update_static_attributes()
return max(lights_changed, groups_updated)
update_lights()
if config[CONF_ALLOW_LIGHTIFY_GROUPS]:
update_groups() |
Return True if Home Assistant is running on a Home Assistant Yellow. | def _is_yellow(hass: HomeAssistant) -> bool:
"""Return True if Home Assistant is running on a Home Assistant Yellow."""
try:
yellow_hardware.async_info(hass)
except HomeAssistantError:
return False
return True |
Generate a default network name. | def compose_default_network_name(pan_id: int) -> str:
"""Generate a default network name."""
return f"ha-thread-{pan_id:04x}" |
Generate a random PAN ID. | def generate_random_pan_id() -> int:
"""Generate a random PAN ID."""
# PAN ID is 2 bytes, 0xffff is reserved for broadcast
return random.randint(0, 0xFFFE) |
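A small usage sketch combining the two Thread helpers above; the range and format string mirror the code, the assertion is only illustrative.
import random
pan_id = random.randint(0, 0xFFFE)  # same range as generate_random_pan_id
network_name = f"ha-thread-{pan_id:04x}"  # same format as compose_default_network_name
assert network_name.startswith("ha-thread-") and len(network_name) == 14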
Handle OTBR errors. | def _handle_otbr_error(
func: Callable[Concatenate[OTBRData, _P], Coroutine[Any, Any, _R]],
) -> Callable[Concatenate[OTBRData, _P], Coroutine[Any, Any, _R]]:
"""Handle OTBR errors."""
@wraps(func)
async def _func(self: OTBRData, *args: _P.args, **kwargs: _P.kwargs) -> _R:
try:
return await func(self, *args, **kwargs)
except python_otbr_api.OTBRError as exc:
raise HomeAssistantError("Failed to call OTBR API") from exc
return _func |
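A self-contained sketch of the same wrap-and-translate decorator pattern; BackendError and FrontendError are stand-ins, not the real python_otbr_api or Home Assistant exception classes.
import asyncio
from collections.abc import Callable, Coroutine
from functools import wraps
from typing import Any
class BackendError(Exception):
    """Stand-in for python_otbr_api.OTBRError."""
class FrontendError(Exception):
    """Stand-in for HomeAssistantError."""
def translate_errors(
    func: Callable[..., Coroutine[Any, Any, Any]],
) -> Callable[..., Coroutine[Any, Any, Any]]:
    @wraps(func)
    async def _wrapped(*args: Any, **kwargs: Any) -> Any:
        try:
            return await func(*args, **kwargs)
        except BackendError as exc:
            # Re-raise as the caller-facing error, keeping the original cause.
            raise FrontendError("Failed to call backend API") from exc
    return _wrapped
class Client:
    @translate_errors
    async def fetch(self) -> str:
        raise BackendError("boom")
async def main() -> None:
    try:
        await Client().fetch()
    except FrontendError as err:
        print(f"caught: {err}; cause: {err.__cause__!r}")
asyncio.run(main())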
Warn user if insecure default network settings are used. | def _warn_on_default_network_settings(
hass: HomeAssistant, otbrdata: OTBRData, dataset_tlvs: bytes
) -> None:
"""Warn user if insecure default network settings are used."""
dataset = tlv_parser.parse_tlv(dataset_tlvs.hex())
insecure = False
if (
network_key := dataset.get(MeshcopTLVType.NETWORKKEY)
) is not None and network_key.data in INSECURE_NETWORK_KEYS:
insecure = True
if (
not insecure
and MeshcopTLVType.EXTPANID in dataset
and MeshcopTLVType.NETWORKNAME in dataset
and MeshcopTLVType.PSKC in dataset
):
ext_pan_id = dataset[MeshcopTLVType.EXTPANID]
network_name = cast(tlv_parser.NetworkName, dataset[MeshcopTLVType.NETWORKNAME])
pskc = dataset[MeshcopTLVType.PSKC].data
for passphrase in INSECURE_PASSPHRASES:
if pskc == compute_pskc(ext_pan_id.data, network_name.name, passphrase):
insecure = True
break
if insecure:
ir.async_create_issue(
hass,
DOMAIN,
f"insecure_thread_network_{otbrdata.entry_id}",
is_fixable=False,
is_persistent=False,
severity=ir.IssueSeverity.WARNING,
translation_key="insecure_thread_network",
)
else:
ir.async_delete_issue(
hass,
DOMAIN,
f"insecure_thread_network_{otbrdata.entry_id}",
) |
Set up the OTBR Websocket API. | def async_setup(hass: HomeAssistant) -> None:
"""Set up the OTBR Websocket API."""
websocket_api.async_register_command(hass, websocket_info)
websocket_api.async_register_command(hass, websocket_create_network)
websocket_api.async_register_command(hass, websocket_set_channel)
websocket_api.async_register_command(hass, websocket_set_network) |
Return the state of the device. | def _state_tsk_alarm_controller(select_state: Callable[[str], OverkizStateType]) -> str:
"""Return the state of the device."""
if (
cast(str, select_state(OverkizState.INTERNAL_INTRUSION_DETECTED))
== OverkizCommandParam.DETECTED
):
return STATE_ALARM_TRIGGERED
if cast(str, select_state(OverkizState.INTERNAL_CURRENT_ALARM_MODE)) != cast(
str, select_state(OverkizState.INTERNAL_TARGET_ALARM_MODE)
):
return STATE_ALARM_PENDING
return MAP_INTERNAL_STATUS_STATE[
cast(str, select_state(OverkizState.INTERNAL_TARGET_ALARM_MODE))
] |
Return the state of the device. | def _state_stateful_alarm_controller(
select_state: Callable[[str], OverkizStateType],
) -> str:
"""Return the state of the device."""
if state := cast(str, select_state(OverkizState.CORE_ACTIVE_ZONES)):
# The Stateful Alarm Controller has 3 zones with the following options:
# (A, B, C, A,B, B,C, A,C, A,B,C). Since it is not possible to map this to AlarmControlPanel entity,
# only the most important zones are mapped, other zones can only be disarmed.
if state in MAP_CORE_ACTIVE_ZONES:
return MAP_CORE_ACTIVE_ZONES[state]
return STATE_ALARM_ARMED_CUSTOM_BYPASS
return STATE_ALARM_DISARMED |
Return the state of the device. | def _state_myfox_alarm_controller(
select_state: Callable[[str], OverkizStateType],
) -> str:
"""Return the state of the device."""
if (
cast(str, select_state(OverkizState.CORE_INTRUSION))
== OverkizCommandParam.DETECTED
):
return STATE_ALARM_TRIGGERED
return MAP_MYFOX_STATUS_STATE[
cast(str, select_state(OverkizState.MYFOX_ALARM_STATUS))
] |
Return the state of the device. | def _state_alarm_panel_controller(
select_state: Callable[[str], OverkizStateType],
) -> str:
"""Return the state of the device."""
return MAP_ARM_TYPE[
cast(str, select_state(OverkizState.VERISURE_ALARM_PANEL_MAIN_ARM_TYPE))
] |
Change the selected option for Open/Closed/Pedestrian. | def _select_option_open_closed_pedestrian(
option: str, execute_command: Callable[..., Awaitable[None]]
) -> Awaitable[None]:
"""Change the selected option for Open/Closed/Pedestrian."""
return execute_command(
{
OverkizCommandParam.CLOSED: OverkizCommand.CLOSE,
OverkizCommandParam.OPEN: OverkizCommand.OPEN,
OverkizCommandParam.PEDESTRIAN: OverkizCommand.SET_PEDESTRIAN_POSITION,
}[OverkizCommandParam(option)]
) |
Change the selected option for Open/Closed/Partial. | def _select_option_open_closed_partial(
option: str, execute_command: Callable[..., Awaitable[None]]
) -> Awaitable[None]:
"""Change the selected option for Open/Closed/Partial."""
return execute_command(
{
OverkizCommandParam.CLOSED: OverkizCommand.CLOSE,
OverkizCommandParam.OPEN: OverkizCommand.OPEN,
OverkizCommandParam.PARTIAL: OverkizCommand.PARTIAL_POSITION,
}[OverkizCommandParam(option)]
) |
Change the selected option for Memorized Simple Volume. | def _select_option_memorized_simple_volume(
option: str, execute_command: Callable[..., Awaitable[None]]
) -> Awaitable[None]:
"""Change the selected option for Memorized Simple Volume."""
return execute_command(OverkizCommand.SET_MEMORIZED_SIMPLE_VOLUME, option) |
Change the selected option for Active Zone(s). | def _select_option_active_zone(
option: str, execute_command: Callable[..., Awaitable[None]]
) -> Awaitable[None]:
"""Change the selected option for Active Zone(s)."""
# Turn alarm off when empty zone is selected
if option == "":
return execute_command(OverkizCommand.ALARM_OFF)
return execute_command(OverkizCommand.ALARM_ZONE_ON, option) |
Create Overkiz local client. | def create_local_client(
hass: HomeAssistant, host: str, token: str, verify_ssl: bool
) -> OverkizClient:
"""Create Overkiz local client."""
session = async_create_clientsession(hass, verify_ssl=verify_ssl)
return OverkizClient(
username="",
password="",
token=token,
session=session,
server=generate_local_server(host=host),
verify_ssl=verify_ssl,
) |
Create Overkiz cloud client. | def create_cloud_client(
hass: HomeAssistant, username: str, password: str, server: OverkizServer
) -> OverkizClient:
"""Create Overkiz cloud client."""
# To allow users with multiple accounts/hubs, we create a new session so they have separate cookies
session = async_create_clientsession(hass)
return OverkizClient(
username=username, password=password, session=session, server=server
) |
Test if we support encryption. | def supports_encryption() -> bool:
"""Test if we support encryption."""
return nacl is not None |
Return decryption function and length of key.
Async friendly. | def get_cipher():
"""Return decryption function and length of key.
Async friendly.
"""
def decrypt(ciphertext, key):
"""Decrypt ciphertext using key."""
return SecretBox(key).decrypt(ciphertext, encoder=Base64Encoder)
return (SecretBox.KEY_SIZE, decrypt) |
Parse an MQTT topic {sub_topic}/user/dev, return (user, dev) tuple.
Async friendly. | def _parse_topic(topic, subscribe_topic):
"""Parse an MQTT topic {sub_topic}/user/dev, return (user, dev) tuple.
Async friendly.
"""
subscription = subscribe_topic.split("/")
try:
user_index = subscription.index("#")
except ValueError:
_LOGGER.error("Can't parse subscription topic: '%s'", subscribe_topic)
raise
topic_list = topic.split("/")
try:
user, device = topic_list[user_index], topic_list[user_index + 1]
except IndexError:
_LOGGER.error("Can't parse topic: '%s'", topic)
raise
return user, device |
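For reference, the same split _parse_topic performs, done inline with a made-up subscription and topic.
subscribe_topic = "owntracks/#"
topic = "owntracks/paulus/phone"
user_index = subscribe_topic.split("/").index("#")  # position of the wildcard
parts = topic.split("/")
user, device = parts[user_index], parts[user_index + 1]
assert (user, device) == ("paulus", "phone")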
Parse the OwnTracks location parameters into the format see expects.
Async friendly. | def _parse_see_args(message, subscribe_topic):
"""Parse the OwnTracks location parameters, into the format see expects.
Async friendly.
"""
user, device = _parse_topic(message["topic"], subscribe_topic)
dev_id = slugify(f"{user}_{device}")
kwargs = {"dev_id": dev_id, "host_name": user, "attributes": {}}
if message["lat"] is not None and message["lon"] is not None:
kwargs["gps"] = (message["lat"], message["lon"])
else:
kwargs["gps"] = None
if "acc" in message:
kwargs["gps_accuracy"] = message["acc"]
if "batt" in message:
kwargs["battery"] = message["batt"]
if "vel" in message:
kwargs["attributes"]["velocity"] = message["vel"]
if "tid" in message:
kwargs["attributes"]["tid"] = message["tid"]
if "addr" in message:
kwargs["attributes"]["address"] = message["addr"]
if "cog" in message:
kwargs["attributes"]["course"] = message["cog"]
if "bs" in message:
kwargs["attributes"]["battery_status"] = message["bs"]
if "t" in message:
if message["t"] in ("c", "u"):
kwargs["source_type"] = SourceType.GPS
if message["t"] == "b":
kwargs["source_type"] = SourceType.BLUETOOTH_LE
return dev_id, kwargs |
Set the see parameters from the zone parameters.
Async friendly. | def _set_gps_from_zone(kwargs, location, zone):
"""Set the see parameters from the zone parameters.
Async friendly.
"""
if zone is not None:
kwargs["gps"] = (
zone.attributes[ATTR_LATITUDE],
zone.attributes[ATTR_LONGITUDE],
)
kwargs["gps_accuracy"] = zone.attributes["radius"]
kwargs["location_name"] = location
return kwargs |
Decrypt encrypted payload. | def _decrypt_payload(secret, topic, ciphertext):
"""Decrypt encrypted payload."""
try:
if supports_encryption():
keylen, decrypt = get_cipher()
else:
_LOGGER.warning("Ignoring encrypted payload because nacl not installed")
return None
except OSError:
_LOGGER.warning("Ignoring encrypted payload because nacl not installed")
return None
if isinstance(secret, dict):
key = secret.get(topic)
else:
key = secret
if key is None:
_LOGGER.warning(
"Ignoring encrypted payload because no decryption key known for topic %s",
topic,
)
return None
key = key.encode("utf-8")
key = key[:keylen]
key = key.ljust(keylen, b"\0")
try:
message = decrypt(ciphertext, key)
message = message.decode("utf-8")
except ValueError:
_LOGGER.warning(
(
"Ignoring encrypted payload because unable to decrypt using key for"
" topic %s"
),
topic,
)
return None
_LOGGER.debug("Decrypted payload: %s", message)
return message |
Encrypt message. | def encrypt_message(secret, topic, message):
"""Encrypt message."""
keylen = SecretBox.KEY_SIZE
if isinstance(secret, dict):
key = secret.get(topic)
else:
key = secret
if key is None:
_LOGGER.warning(
"Unable to encrypt payload because no decryption key known for topic %s",
topic,
)
return None
key = key.encode("utf-8")
key = key[:keylen]
key = key.ljust(keylen, b"\0")
try:
message = message.encode("utf-8")
payload = SecretBox(key).encrypt(message, encoder=Base64Encoder)
_LOGGER.debug("Encrypted message: %s to %s", message, payload)
return payload.decode("utf-8")
except ValueError:
_LOGGER.warning("Unable to encrypt message for topic %s", topic)
return None |
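A round-trip sketch of the key normalization used by _decrypt_payload and encrypt_message above; it only relies on PyNaCl's SecretBox and Base64Encoder, which the code already uses, and the passphrase is made up.
from nacl.encoding import Base64Encoder
from nacl.secret import SecretBox
secret = "my-shared-passphrase"  # hypothetical OwnTracks secret
# Truncate or zero-pad the passphrase to the exact SecretBox key size.
key = secret.encode("utf-8")[: SecretBox.KEY_SIZE].ljust(SecretBox.KEY_SIZE, b"\0")
ciphertext = SecretBox(key).encrypt(b"hello owntracks", encoder=Base64Encoder)
assert SecretBox(key).decrypt(ciphertext, encoder=Base64Encoder) == b"hello owntracks"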
Set up the Panasonic Blu-ray platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Panasonic Blu-ray platform."""
conf = discovery_info if discovery_info else config
# Register configured device with Home Assistant.
add_entities([PanasonicBluRay(conf[CONF_HOST], conf[CONF_NAME])]) |
Set up the Pandora media player platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Pandora media player platform."""
if not _pianobar_exists():
return
pandora = PandoraMediaPlayer("Pandora")
# Make sure we end the pandora subprocess on exit in case user doesn't
# power it down.
def _stop_pianobar(_event):
pandora.turn_off()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _stop_pianobar)
add_entities([pandora]) |
Verify that Pianobar is properly installed. | def _pianobar_exists():
"""Verify that Pianobar is properly installed."""
pianobar_exe = shutil.which("pianobar")
if pianobar_exe:
return True
_LOGGER.warning(
"The Pandora integration depends on the Pianobar client, which "
"cannot be found. Please install using instructions at "
"https://www.home-assistant.io/integrations/media_player.pandora/"
)
return False |
Pencom relay platform (pencompy). | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Pencom relay platform (pencompy)."""
# Assign configuration variables.
host = config[CONF_HOST]
port = config[CONF_PORT]
boards = config[CONF_BOARDS]
# Setup connection
try:
hub = Pencompy(host, port, boards=boards)
except OSError as error:
_LOGGER.error("Could not connect to pencompy: %s", error)
raise PlatformNotReady from error
# Add devices.
devs = []
for relay in config[CONF_RELAYS]:
name = relay[CONF_NAME]
board = relay[CONF_BOARD]
addr = relay[CONF_ADDR]
devs.append(PencomRelay(hub, board, addr, name))
add_entities(devs, True) |
Register a callback. | def async_register_callback(
hass: HomeAssistant,
_callback: Callable[[UpdateType, dict[str, Notification]], None],
) -> CALLBACK_TYPE:
"""Register a callback."""
return async_dispatcher_connect(
hass, SIGNAL_PERSISTENT_NOTIFICATIONS_UPDATED, _callback
) |
Generate a notification. | def create(
hass: HomeAssistant,
message: str,
title: str | None = None,
notification_id: str | None = None,
) -> None:
"""Generate a notification."""
hass.add_job(async_create, hass, message, title, notification_id) |
Remove a notification. | def dismiss(hass: HomeAssistant, notification_id: str) -> None:
"""Remove a notification."""
hass.add_job(async_dismiss, hass, notification_id) |
Generate a notification. | def async_create(
hass: HomeAssistant,
message: str,
title: str | None = None,
notification_id: str | None = None,
) -> None:
"""Generate a notification."""
notifications = _async_get_or_create_notifications(hass)
if notification_id is None:
notification_id = random_uuid_hex()
notifications[notification_id] = {
ATTR_MESSAGE: message,
ATTR_NOTIFICATION_ID: notification_id,
ATTR_TITLE: title,
ATTR_CREATED_AT: dt_util.utcnow(),
}
async_dispatcher_send(
hass,
SIGNAL_PERSISTENT_NOTIFICATIONS_UPDATED,
UpdateType.ADDED,
{notification_id: notifications[notification_id]},
) |
Get or create notifications data. | def _async_get_or_create_notifications(hass: HomeAssistant) -> dict[str, Notification]:
"""Get or create notifications data."""
return {} |
Remove a notification. | def async_dismiss(hass: HomeAssistant, notification_id: str) -> None:
"""Remove a notification."""
notifications = _async_get_or_create_notifications(hass)
if not (notification := notifications.pop(notification_id, None)):
return
async_dispatcher_send(
hass,
SIGNAL_PERSISTENT_NOTIFICATIONS_UPDATED,
UpdateType.REMOVED,
{notification_id: notification},
) |
Remove all notifications. | def async_dismiss_all(hass: HomeAssistant) -> None:
"""Remove all notifications."""
notifications = _async_get_or_create_notifications(hass)
notifications_copy = notifications.copy()
notifications.clear()
async_dispatcher_send(
hass,
SIGNAL_PERSISTENT_NOTIFICATIONS_UPDATED,
UpdateType.REMOVED,
notifications_copy,
) |
Return a list of persistent_notifications. | def websocket_get_notifications(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: Mapping[str, Any],
) -> None:
"""Return a list of persistent_notifications."""
connection.send_message(
websocket_api.result_message(
msg["id"], list(_async_get_or_create_notifications(hass).values())
)
) |
Send persistent_notification update. | def _async_send_notification_update(
connection: websocket_api.ActiveConnection,
msg_id: int,
update_type: UpdateType,
notifications: dict[str, Notification],
) -> None:
"""Send persistent_notification update."""
connection.send_message(
websocket_api.event_message(
msg_id, {"type": update_type, "notifications": notifications}
)
) |
Subscribe to persistent_notification updates. | def websocket_subscribe_notifications(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: Mapping[str, Any],
) -> None:
"""Return a list of persistent_notifications."""
notifications = _async_get_or_create_notifications(hass)
msg_id = msg["id"]
notify_func = partial(_async_send_notification_update, connection, msg_id)
connection.subscriptions[msg_id] = async_dispatcher_connect(
hass, SIGNAL_PERSISTENT_NOTIFICATIONS_UPDATED, notify_func
)
connection.send_result(msg_id)
notify_func(UpdateType.CURRENT, notifications) |
Describe group on off states. | def async_describe_on_off_states(
hass: HomeAssistant, registry: "GroupIntegrationRegistry"
) -> None:
"""Describe group on off states."""
registry.on_off_states(DOMAIN, {STATE_HOME}, STATE_HOME, STATE_NOT_HOME) |
Test if state significantly changed. | def async_check_significant_change(
hass: HomeAssistant,
old_state: str,
old_attrs: dict,
new_state: str,
new_attrs: dict,
**kwargs: Any,
) -> bool | None:
"""Test if state significantly changed."""
if new_state != old_state:
return True
return False |
Return all persons that reference the entity. | def persons_with_entity(hass: HomeAssistant, entity_id: str) -> list[str]:
"""Return all persons that reference the entity."""
if (
DOMAIN not in hass.data
or split_entity_id(entity_id)[0] != DEVICE_TRACKER_DOMAIN
):
return []
component: EntityComponent[Person] = hass.data[DOMAIN][2]
return [
person_entity.entity_id
for person_entity in component.entities
if entity_id in person_entity.device_trackers
] |
Return all entities belonging to a person. | def entities_in_person(hass: HomeAssistant, entity_id: str) -> list[str]:
"""Return all entities belonging to a person."""
if DOMAIN not in hass.data:
return []
component: EntityComponent[Person] = hass.data[DOMAIN][2]
if (person_entity := component.get_entity(entity_id)) is None:
return []
return person_entity.device_trackers |
List persons. | def ws_list_person(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""List persons."""
yaml, storage, _ = hass.data[DOMAIN]
connection.send_result(
msg[ATTR_ID], {"storage": storage.async_items(), "config": yaml.async_items()}
) |
Get latest state. | def _get_latest(prev: State | None, curr: State) -> State:
"""Get latest state."""
if prev is None or curr.last_updated > prev.last_updated:
return curr
return prev |
Return True if at least one recording in the list has the specified value for the given entry. | def _check_for_recording_entry(api: PhilipsTV, entry: str, value: str) -> bool:
"""Return True if at least one recording in the list has the specified value for the given entry."""
if api.recordings_list is None:
return False
return any(rec.get(entry) == value for rec in api.recordings_list["recordings"]) |
Return trigger description for a turn on trigger. | def async_get_turn_on_trigger(device_id: str) -> dict[str, str]:
"""Return trigger description for a turn on trigger."""
return {
CONF_PLATFORM: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_TYPE: TRIGGER_TYPE_TURN_ON,
} |
Extract the color settings data from a style. | def _get_settings(style: AmbilightCurrentConfiguration):
"""Extract the color settings data from a style."""
if style["styleName"] in ("FOLLOW_COLOR", "Lounge light"):
return style["colorSettings"]
if style["styleName"] == "FOLLOW_AUDIO":
return style["audioSettings"]
return None |
Return cache keys to avoid always updating. | def _get_cache_keys(device: PhilipsTV):
"""Return cache keys to avoid always updating."""
return (
device.on,
device.powerstate,
device.ambilight_current_configuration,
device.ambilight_mode,
) |
Calculate an average color over all ambilight pixels. | def _average_pixels(data):
"""Calculate an average color over all ambilight pixels."""
color_c = 0
color_r = 0.0
color_g = 0.0
color_b = 0.0
for layer in data.values():
for side in layer.values():
for pixel in side.values():
color_c += 1
color_r += pixel["r"]
color_g += pixel["g"]
color_b += pixel["b"]
if color_c:
color_r /= color_c
color_g /= color_c
color_b /= color_c
return color_r, color_g, color_b
return 0.0, 0.0, 0.0 |
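A quick illustration of the nested layer/side/pixel structure consumed above, assuming _average_pixels is in scope; the pixel values are made up.
data = {
    "layer1": {
        "left": {"0": {"r": 255, "g": 0, "b": 0}},
        "right": {"0": {"r": 0, "g": 0, "b": 255}},
    }
}
# Two pixels averaged per channel.
assert _average_pixels(data) == (127.5, 0.0, 127.5)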
Query the API client with the product name and return the id of the first match. | def product_search(api_client: PicnicAPI, product_name: str | None) -> None | str:
"""Query the API client with the product name and return the id of the first match."""
if product_name is None:
return None
search_result = api_client.search(product_name)
if not search_result or "items" not in search_result[0]:
return None
# Return the first valid result
for item in search_result[0]["items"]:
if "name" in item:
return str(item["id"])
return None |
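A minimal sketch of the search flow above with a stub client standing in for PicnicAPI; the payload shape simply mirrors what the function reads, and assumes product_search is in scope.
class _StubClient:
    def search(self, name):
        # Shape mirrors the "items" list product_search iterates over.
        return [{"items": [{"id": 321, "name": name}]}]
assert product_search(_StubClient(), "milk") == "321"
assert product_search(_StubClient(), None) is None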
Create an instance of the PicnicAPI client. | def create_picnic_client(entry: ConfigEntry):
"""Create an instance of the PicnicAPI client."""
return PicnicAPI(
auth_token=entry.data.get(CONF_ACCESS_TOKEN),
country_code=entry.data.get(CONF_COUNTRY_CODE),
) |
Set up Pico speech component. | def get_engine(hass, config, discovery_info=None):
"""Set up Pico speech component."""
if shutil.which("pico2wave") is None:
_LOGGER.error("'pico2wave' was not found")
return False
return PicoProvider(config[CONF_LANG]) |
Set up Pilight Binary Sensor. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up Pilight Binary Sensor."""
if config.get(CONF_DISARM_AFTER_TRIGGER):
add_entities(
[
PilightTriggerSensor(
hass=hass,
name=config.get(CONF_NAME),
variable=config.get(CONF_VARIABLE),
payload=config.get(CONF_PAYLOAD),
on_value=config.get(CONF_PAYLOAD_ON),
off_value=config.get(CONF_PAYLOAD_OFF),
rst_dly_sec=config.get(CONF_RESET_DELAY_SEC),
)
]
)
else:
add_entities(
[
PilightBinarySensor(
hass=hass,
name=config.get(CONF_NAME),
variable=config.get(CONF_VARIABLE),
payload=config.get(CONF_PAYLOAD),
on_value=config.get(CONF_PAYLOAD_ON),
off_value=config.get(CONF_PAYLOAD_OFF),
)
]
) |
Set up the Pilight platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Pilight platform."""
switches = config[CONF_LIGHTS]
devices = []
for dev_name, dev_config in switches.items():
devices.append(PilightLight(hass, dev_name, dev_config))
add_entities(devices) |
Set up Pilight Sensor. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up Pilight Sensor."""
add_entities(
[
PilightSensor(
hass=hass,
name=config.get(CONF_NAME),
variable=config.get(CONF_VARIABLE),
payload=config.get(CONF_PAYLOAD),
unit_of_measurement=config.get(CONF_UNIT_OF_MEASUREMENT),
)
]
) |
Set up the Pilight platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Pilight platform."""
switches = config[CONF_SWITCHES]
devices = []
for dev_name, dev_config in switches.items():
devices.append(PilightSwitch(hass, dev_name, dev_config))
add_entities(devices) |
Set up the Pilight component. | def setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Pilight component."""
host = config[DOMAIN][CONF_HOST]
port = config[DOMAIN][CONF_PORT]
send_throttler = CallRateDelayThrottle(hass, config[DOMAIN][CONF_SEND_DELAY])
try:
pilight_client = pilight.Client(host=host, port=port)
except (OSError, TimeoutError) as err:
_LOGGER.error("Unable to connect to %s on port %s: %s", host, port, err)
return False
def start_pilight_client(_):
"""Run when Home Assistant starts."""
pilight_client.start()
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, start_pilight_client)
def stop_pilight_client(_):
"""Run once when Home Assistant stops."""
pilight_client.stop()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_pilight_client)
@send_throttler.limited
def send_code(call: ServiceCall) -> None:
"""Send RF code to the pilight-daemon."""
# Change type to dict from mappingproxy since data has to be JSON
# serializable
message_data = dict(call.data)
try:
pilight_client.send_code(message_data)
except OSError:
_LOGGER.error("Pilight send failed for %s", str(message_data))
hass.services.register(DOMAIN, SERVICE_NAME, send_code, schema=RF_CODE_SCHEMA)
# Publish received codes on the HA event bus
# A whitelist of codes to be published in the event bus
whitelist = config[DOMAIN].get(CONF_WHITELIST)
def handle_received_code(data):
"""Run when RF codes are received."""
# Unravel dict of dicts to make event_data cut in automation rule
# possible
data = dict(
{"protocol": data["protocol"], "uuid": data["uuid"]}, **data["message"]
)
# No whitelist defined or data matches whitelist, put data on event bus
if not whitelist or all(str(data[key]) in whitelist[key] for key in whitelist):
hass.bus.fire(EVENT, data)
pilight_client.set_callback(handle_received_code)
return True |
Set up the Pioneer platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Pioneer platform."""
pioneer = PioneerDevice(
config[CONF_NAME],
config[CONF_HOST],
config[CONF_PORT],
config[CONF_TIMEOUT],
config[CONF_SOURCES],
)
if pioneer.update():
add_entities([pioneer]) |
Set up the PJLink platform. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the PJLink platform."""
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
name = config.get(CONF_NAME)
encoding = config.get(CONF_ENCODING)
password = config.get(CONF_PASSWORD)
if DOMAIN not in hass.data:
hass.data[DOMAIN] = {}
hass_data = hass.data[DOMAIN]
device_label = f"{host}:{port}"
if device_label in hass_data:
return
device = PjLinkDevice(host, port, name, encoding, password)
hass_data[device_label] = device
add_entities([device], True) |
Format input source for display in UI. | def format_input_source(input_source_name, input_source_number):
"""Format input source for display in UI."""
return f"{input_source_name} {input_source_number}" |
Init webhook based on config entry. | def async_setup_webhook(hass: HomeAssistant, entry: ConfigEntry):
"""Init webhook based on config entry."""
webhook_id = entry.data[CONF_WEBHOOK_ID]
device_name = entry.data[CONF_DEVICE_NAME]
_set_entry_data(entry, hass)
webhook.async_register(
hass, DOMAIN, f"{DOMAIN}.{device_name}", webhook_id, handle_webhook
) |
Return name of device sensor. | def _device_id(data):
"""Return name of device sensor."""
return f"{data.get(ATTR_DEVICE_NAME)}_{data.get(ATTR_DEVICE_ID)}" |
Describe group on off states. | def async_describe_on_off_states(
hass: HomeAssistant, registry: "GroupIntegrationRegistry"
) -> None:
"""Describe group on off states."""
registry.on_off_states(DOMAIN, {STATE_PROBLEM}, STATE_PROBLEM, STATE_OK) |
Play media. | def _play_media(
hass: HomeAssistant, chromecast: Chromecast, media_type: str, media_id: str
) -> None:
"""Play media."""
result = process_plex_payload(hass, media_type, media_id)
controller = PlexController()
chromecast.register_handler(controller)
offset_in_s = result.offset / 1000
controller.play_media(result.media, offset=offset_in_s) |
Return a set of the configured Plex servers. | def configured_servers(hass):
"""Return a set of the configured Plex servers."""
return {
entry.data[CONF_SERVER_IDENTIFIER]
for entry in hass.config_entries.async_entries(DOMAIN)
} |
Get typed data from hass.data. | def get_plex_data(hass: HomeAssistant) -> PlexData:
"""Get typed data from hass.data."""
return hass.data[DOMAIN] |
Get Plex server from hass.data. | def get_plex_server(hass: HomeAssistant, server_id: str) -> PlexServer:
"""Get Plex server from hass.data."""
return get_plex_data(hass)[SERVERS][server_id] |
Return a formatted title for the given media item. | def pretty_title(media, short_name=False):
"""Return a formatted title for the given media item."""
year = None
if media.type == "album":
if short_name:
title = media.title
else:
title = f"{media.parentTitle} - {media.title}"
elif media.type == "episode":
title = f"{media.seasonEpisode.upper()} - {media.title}"
if not short_name:
title = f"{media.grandparentTitle} - {title}"
elif media.type == "season":
title = media.title
if not short_name:
title = f"{media.parentTitle} - {title}"
elif media.type == "track":
title = f"{media.index}. {media.title}"
else:
title = media.title
if media.type in ["album", "movie", "season"]:
year = media.year
if year:
title += f" ({year!s})"
return title |
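Illustrative calls with stand-in objects instead of plexapi media items (attribute names match what pretty_title reads; titles are made up), assuming pretty_title is in scope.
from types import SimpleNamespace
movie = SimpleNamespace(type="movie", title="Blade Runner", year=1982)
assert pretty_title(movie) == "Blade Runner (1982)"
episode = SimpleNamespace(
    type="episode", title="Pilot", seasonEpisode="s01e01", grandparentTitle="Some Show"
)
assert pretty_title(episode) == "Some Show - S01E01 - Pilot"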
Implement the websocket media browsing helper. | def browse_media( # noqa: C901
hass, is_internal, media_content_type, media_content_id, *, platform=None
):
"""Implement the websocket media browsing helper."""
server_id = None
plex_server = None
special_folder = None
if media_content_id:
url = URL(media_content_id)
server_id = url.host
plex_server = get_plex_server(hass, server_id)
if media_content_type == "hub":
_, hub_location, hub_identifier = url.parts
elif media_content_type in ["library", "server"] and len(url.parts) > 2:
_, media_content_id, special_folder = url.parts
else:
media_content_id = url.name
if media_content_type in ("plex_root", None):
return root_payload(hass, is_internal, platform=platform)
def item_payload(item, short_name=False, extra_params=None):
"""Create response payload for a single media item."""
try:
media_class = ITEM_TYPE_MEDIA_CLASS[item.type]
except KeyError as err:
raise UnknownMediaType(f"Unknown type received: {item.type}") from err
payload = {
"title": pretty_title(item, short_name),
"media_class": media_class,
"media_content_id": generate_plex_uri(
server_id, item.ratingKey, params=extra_params
),
"media_content_type": item.type,
"can_play": True,
"can_expand": item.type in EXPANDABLES,
}
if hasattr(item, "thumbUrl"):
plex_server.thumbnail_cache.setdefault(str(item.ratingKey), item.thumbUrl)
if is_internal:
thumbnail = item.thumbUrl
else:
thumbnail = get_proxy_image_url(
server_id,
item.ratingKey,
)
payload["thumbnail"] = thumbnail
return BrowseMedia(**payload)
def server_payload():
"""Create response payload to describe libraries of the Plex server."""
server_info = BrowseMedia(
title=plex_server.friendly_name,
media_class=MediaClass.DIRECTORY,
media_content_id=generate_plex_uri(server_id, "server"),
media_content_type="server",
can_play=False,
can_expand=True,
children=[],
children_media_class=MediaClass.DIRECTORY,
thumbnail="https://brands.home-assistant.io/_/plex/logo.png",
)
if platform != "sonos":
server_info.children.append(
special_library_payload(server_info, "Recommended")
)
for library in plex_server.library.sections():
if library.type == "photo":
continue
if library.type != "artist" and platform == "sonos":
continue
server_info.children.append(library_section_payload(library))
server_info.children.append(playlists_payload())
return server_info
def library_contents(library):
"""Create response payload to describe contents of a specific library."""
library_info = library_section_payload(library)
library_info.children = [special_library_payload(library_info, "Recommended")]
for item in library.all():
try:
library_info.children.append(item_payload(item))
except UnknownMediaType:
continue
return library_info
def playlists_payload():
"""Create response payload for all available playlists."""
playlists_info = {
"title": "Playlists",
"media_class": MediaClass.DIRECTORY,
"media_content_id": generate_plex_uri(server_id, "all"),
"media_content_type": "playlists",
"can_play": False,
"can_expand": True,
"children": [],
}
for playlist in plex_server.playlists():
if playlist.playlistType != "audio" and platform == "sonos":
continue
try:
playlists_info["children"].append(item_payload(playlist))
except UnknownMediaType:
continue
response = BrowseMedia(**playlists_info)
response.children_media_class = MediaClass.PLAYLIST
return response
def build_item_response(payload):
"""Create response payload for the provided media query."""
try:
media = plex_server.lookup_media(**payload)
except MediaNotFound:
return None
try:
media_info = item_payload(media)
except UnknownMediaType:
return None
if media_info.can_expand:
media_info.children = []
if media.TYPE == "artist" and platform != "sonos":
if (station := media.station()) is not None:
media_info.children.append(station_payload(station))
for item in media:
try:
media_info.children.append(item_payload(item, short_name=True))
except UnknownMediaType:
continue
return media_info
if media_content_type == "hub":
if hub_location == "server":
hub = next(
x
for x in plex_server.library.hubs()
if x.hubIdentifier == hub_identifier
)
media_content_id = f"server/{hub.hubIdentifier}"
else:
library_section = plex_server.library.sectionByID(int(hub_location))
hub = next(
x for x in library_section.hubs() if x.hubIdentifier == hub_identifier
)
media_content_id = f"{hub.librarySectionID}/{hub.hubIdentifier}"
try:
children_media_class = ITEM_TYPE_MEDIA_CLASS[hub.type]
except KeyError as err:
raise UnknownMediaType(f"Unknown type received: {hub.type}") from err
payload = {
"title": hub.title,
"media_class": MediaClass.DIRECTORY,
"media_content_id": generate_plex_uri(server_id, media_content_id),
"media_content_type": "hub",
"can_play": False,
"can_expand": True,
"children": [],
"children_media_class": children_media_class,
}
for item in hub.items:
if hub.type == "station":
if platform == "sonos":
continue
payload["children"].append(station_payload(item))
else:
extra_params = None
hub_context = hub.context.split(".")[-1]
if hub_context in ("continue", "inprogress", "ondeck"):
extra_params = {"resume": 1}
payload["children"].append(
item_payload(item, extra_params=extra_params)
)
return BrowseMedia(**payload)
if special_folder:
if media_content_type == "server":
library_or_section = plex_server.library
children_media_class = MediaClass.DIRECTORY
title = plex_server.friendly_name
elif media_content_type == "library":
library_or_section = plex_server.library.sectionByID(int(media_content_id))
title = library_or_section.title
try:
children_media_class = ITEM_TYPE_MEDIA_CLASS[library_or_section.TYPE]
except KeyError as err:
raise UnknownMediaType(
f"Unknown type received: {library_or_section.TYPE}"
) from err
else:
raise BrowseError(
f"Media not found: {media_content_type} / {media_content_id}"
)
payload = {
"title": title,
"media_class": MediaClass.DIRECTORY,
"media_content_id": generate_plex_uri(
server_id, f"{media_content_id}/{special_folder}"
),
"media_content_type": media_content_type,
"can_play": False,
"can_expand": True,
"children": [],
"children_media_class": children_media_class,
}
if special_folder == "Recommended":
for item in library_or_section.hubs():
if item.type == "photo":
continue
payload["children"].append(hub_payload(item))
return BrowseMedia(**payload)
try:
if media_content_type == "server":
return server_payload()
if media_content_type == "library":
library_id = int(media_content_id)
library = plex_server.library.sectionByID(library_id)
return library_contents(library)
except UnknownMediaType as err:
raise BrowseError(
f"Media not found: {media_content_type} / {media_content_id}"
) from err
if media_content_type == "playlists":
return playlists_payload()
payload = {
"media_type": DOMAIN,
"plex_key": int(media_content_id),
}
response = build_item_response(payload)
if response is None:
raise BrowseError(f"Media not found: {media_content_type} / {media_content_id}")
return response |
Create a media_content_id URL for playable Plex media. | def generate_plex_uri(server_id, media_id, params=None):
"""Create a media_content_id URL for playable Plex media."""
if isinstance(media_id, int):
media_id = str(media_id)
if isinstance(media_id, str) and not media_id.startswith("/"):
media_id = f"/{media_id}"
return str(
URL.build(
scheme=DOMAIN,
host=server_id,
path=media_id,
query=params,
)
) |
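An example of the URI shape produced above; yarl is the same library the code uses, while the server id, rating key, and query parameter values are made up.
from yarl import URL
uri = str(URL.build(scheme="plex", host="abc123", path="/456", query={"resume": "1"}))
assert uri == "plex://abc123/456?resume=1"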
Return root payload for Plex. | def root_payload(hass, is_internal, platform=None):
"""Return root payload for Plex."""
children = [
browse_media(
hass,
is_internal,
"server",
generate_plex_uri(server_id, ""),
platform=platform,
)
for server_id in get_plex_data(hass)[SERVERS]
]
if len(children) == 1:
return children[0]
return BrowseMedia(
title="Plex",
media_class=MediaClass.DIRECTORY,
media_content_id="",
media_content_type="plex_root",
can_play=False,
can_expand=True,
children=children,
) |
Create response payload for a single library section. | def library_section_payload(section):
"""Create response payload for a single library section."""
try:
children_media_class = ITEM_TYPE_MEDIA_CLASS[section.TYPE]
except KeyError as err:
raise UnknownMediaType(f"Unknown type received: {section.TYPE}") from err
server_id = section._server.machineIdentifier # pylint: disable=protected-access
return BrowseMedia(
title=section.title,
media_class=MediaClass.DIRECTORY,
media_content_id=generate_plex_uri(server_id, section.key),
media_content_type="library",
can_play=False,
can_expand=True,
children_media_class=children_media_class,
) |
Create response payload for special library folders. | def special_library_payload(parent_payload, special_type):
"""Create response payload for special library folders."""
title = f"{special_type} ({parent_payload.title})"
special_library_id = f"{parent_payload.media_content_id}/{special_type}"
return BrowseMedia(
title=title,
media_class=parent_payload.media_class,
media_content_id=special_library_id,
media_content_type=parent_payload.media_content_type,
can_play=False,
can_expand=True,
children_media_class=parent_payload.children_media_class,
) |
Create response payload for a hub. | def hub_payload(hub):
"""Create response payload for a hub."""
if hasattr(hub, "librarySectionID"):
media_content_id = f"{hub.librarySectionID}/{hub.hubIdentifier}"
else:
media_content_id = f"server/{hub.hubIdentifier}"
server_id = hub._server.machineIdentifier # pylint: disable=protected-access
payload = {
"title": hub.title,
"media_class": MediaClass.DIRECTORY,
"media_content_id": generate_plex_uri(server_id, media_content_id),
"media_content_type": "hub",
"can_play": False,
"can_expand": True,
}
return BrowseMedia(**payload) |
Create response payload for a music station. | def station_payload(station):
"""Create response payload for a music station."""
server_id = station._server.machineIdentifier # pylint: disable=protected-access
return BrowseMedia(
title=station.title,
media_class=ITEM_TYPE_MEDIA_CLASS[station.type],
media_content_id=generate_plex_uri(server_id, station.key),
media_content_type="station",
can_play=True,
can_expand=False,
) |
Generate a URL for a Plex media browser image. | def get_proxy_image_url(
server_id: str,
media_content_id: str,
) -> str:
"""Generate an url for a Plex media browser image."""
return f"/api/plex_image_proxy/{server_id}/{media_content_id}" |
Ensure session is available for certain attributes. | def needs_session(
func: Callable[Concatenate[_PlexMediaPlayerT, _P], _R],
) -> Callable[Concatenate[_PlexMediaPlayerT, _P], _R | None]:
"""Ensure session is available for certain attributes."""
@wraps(func)
def get_session_attribute(
self: _PlexMediaPlayerT, *args: _P.args, **kwargs: _P.kwargs
) -> _R | None:
if self.session is None:
return None
return func(self, *args, **kwargs)
return get_session_attribute |
Set up Plex media_player entities. | def _async_add_entities(hass, registry, async_add_entities, server_id, new_entities):
"""Set up Plex media_player entities."""
_LOGGER.debug("New entities: %s", new_entities)
entities = []
plexserver = get_plex_server(hass, server_id)
for entity_params in new_entities:
plex_mp = PlexMediaPlayer(plexserver, **entity_params)
entities.append(plex_mp)
# Migration to per-server unique_ids
old_entity_id = registry.async_get_entity_id(
MP_DOMAIN, DOMAIN, plex_mp.machine_identifier
)
if old_entity_id is not None:
new_unique_id = f"{server_id}:{plex_mp.machine_identifier}"
_LOGGER.debug(
"Migrating unique_id from [%s] to [%s]",
plex_mp.machine_identifier,
new_unique_id,
)
registry.async_update_entity(old_entity_id, new_unique_id=new_unique_id)
async_add_entities(entities, True) |
Search for specified Plex media in the provided library section.
Returns a media item or a list of items if `allow_multiple` is set.
Raises MediaNotFound if the search was unsuccessful. | def search_media(
media_type: str,
library_section: LibrarySection,
allow_multiple: bool = False,
**kwargs,
) -> PlexObject | list[PlexObject]:
"""Search for specified Plex media in the provided library section.
Returns a media item or a list of items if `allow_multiple` is set.
Raises MediaNotFound if the search was unsuccessful.
"""
original_query = kwargs.copy()
search_query = {}
libtype = kwargs.pop("libtype", None)
# Preserve legacy service parameters
for legacy_key, key in LEGACY_PARAM_MAPPING.items():
if value := kwargs.pop(legacy_key, None):
_LOGGER.debug(
"Legacy parameter '%s' used, consider using '%s'", legacy_key, key
)
search_query[key] = value
search_query.update(**kwargs)
if not libtype:
# Default to a sane libtype if not explicitly provided
for preferred_libtype in PREFERRED_LIBTYPE_ORDER:
if any(key.startswith(preferred_libtype) for key in search_query):
libtype = preferred_libtype
break
search_query.update(libtype=libtype)
_LOGGER.debug("Processed search query: %s", search_query)
try:
results = library_section.search(**search_query)
except (BadRequest, NotFound) as exc:
raise MediaNotFound(f"Problem in query {original_query}: {exc}") from exc
if not results:
raise MediaNotFound(
f"No {media_type} results in '{library_section.title}' for {original_query}"
)
if len(results) > 1:
if allow_multiple:
return results
if title := search_query.get("title") or search_query.get("movie.title"):
exact_matches = [x for x in results if x.title.lower() == title.lower()]
if len(exact_matches) == 1:
return exact_matches[0]
raise MediaNotFound(
"Multiple matches, make content_id more specific or use `allow_multiple`:"
f" {results}"
)
return results[0] |
Scan a Plex library for new and updated media. | def refresh_library(hass: HomeAssistant, service_call: ServiceCall) -> None:
"""Scan a Plex library for new and updated media."""
plex_server_name = service_call.data.get("server_name")
library_name = service_call.data["library_name"]
plex_server = get_plex_server(hass, plex_server_name)
try:
library = plex_server.library.section(title=library_name)
except NotFound:
_LOGGER.error(
"Library with name '%s' not found in %s",
library_name,
[x.title for x in plex_server.library.sections()],
)
return
_LOGGER.debug("Scanning %s for new and updated media", library_name)
library.update() |
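# Sketch of invoking the service handled above from an async context inside
# Home Assistant (service name and field names assumed to match the handler).
async def example_trigger_refresh(hass):
    await hass.services.async_call(
        "plex",
        "refresh_library",
        {"server_name": "My Plex Server", "library_name": "Movies"},
        blocking=True,
    )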
Retrieve a configured Plex server by name or server ID. | def get_plex_server(
hass: HomeAssistant,
plex_server_name: str | None = None,
plex_server_id: str | None = None,
) -> PlexServer:
"""Retrieve a configured Plex server by name."""
if DOMAIN not in hass.data:
raise HomeAssistantError("Plex integration not configured")
servers: dict[str, PlexServer] = get_plex_data(hass)[SERVERS]
if not servers:
raise HomeAssistantError("No Plex servers available")
if plex_server_id:
return servers[plex_server_id]
plex_servers = servers.values()
if plex_server_name:
plex_server = next(
(x for x in plex_servers if x.friendly_name == plex_server_name), None
)
if plex_server is not None:
return plex_server
friendly_names = [x.friendly_name for x in plex_servers]
raise HomeAssistantError(
f"Requested Plex server '{plex_server_name}' not found in {friendly_names}"
)
if len(plex_servers) == 1:
return next(iter(plex_servers))
friendly_names = [x.friendly_name for x in plex_servers]
raise HomeAssistantError(
"Multiple Plex servers configured, choose with 'plex_server' key:"
f" {friendly_names}"
) |
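# Minimal sketch, assuming a running Home Assistant instance with the Plex
# integration loaded: resolve a server by friendly name, or implicitly when
# exactly one server is configured.
def example_pick_server(hass, server_name=None):
    server = get_plex_server(hass, plex_server_name=server_name)
    return server.friendly_name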
Look up Plex media using media_player.play_media service payloads. | def process_plex_payload(
hass: HomeAssistant,
content_type: str,
content_id: str,
default_plex_server: PlexServer | None = None,
supports_playqueues: bool = True,
) -> PlexMediaSearchResult:
"""Look up Plex media using media_player.play_media service payloads."""
plex_server = default_plex_server
extra_params = {}
if content_id.startswith(PLEX_URI_SCHEME + "{"):
# Handle the special payload of 'plex://{<json>}'
content_id = content_id.removeprefix(PLEX_URI_SCHEME)
content = json.loads(content_id)
elif content_id.startswith(PLEX_URI_SCHEME):
# Handle standard media_browser payloads
plex_url = URL(content_id)
if plex_url.name:
if len(plex_url.parts) == 2:
if plex_url.name == "search":
content = {}
else:
content = int(plex_url.name)
else:
# For "special" items like radio stations
content = plex_url.path
server_id = plex_url.host
plex_server = get_plex_server(hass, plex_server_id=server_id)
else: # noqa: PLR5501
# Handle legacy payloads without server_id in URL host position
if plex_url.host == "search":
content = {}
else:
content = int(plex_url.host) # type: ignore[arg-type]
extra_params = dict(plex_url.query)
else:
content = json.loads(content_id)
if isinstance(content, dict):
if plex_server_name := content.pop("plex_server", None):
plex_server = get_plex_server(hass, plex_server_name)
if not plex_server:
plex_server = get_plex_server(hass)
if content_type == "station":
if not supports_playqueues:
raise HomeAssistantError("Plex stations are not supported on this device")
playqueue = plex_server.create_station_playqueue(content)
return PlexMediaSearchResult(playqueue)
if isinstance(content, int):
content = {"plex_key": content}
content_type = DOMAIN
content.update(extra_params)
if playqueue_id := content.pop("playqueue_id", None):
if not supports_playqueues:
raise HomeAssistantError("Plex playqueues are not supported on this device")
try:
playqueue = plex_server.get_playqueue(playqueue_id)
except NotFound as err:
raise MediaNotFound(
f"PlayQueue '{playqueue_id}' could not be found"
) from err
return PlexMediaSearchResult(playqueue, content)
search_query = content.copy()
shuffle = search_query.pop("shuffle", 0)
# Remove internal kwargs before passing copy to plexapi
for internal_key in ("resume", "offset"):
search_query.pop(internal_key, None)
media = plex_server.lookup_media(content_type, **search_query)
if supports_playqueues and (isinstance(media, list) or shuffle):
playqueue = plex_server.create_playqueue(
media, includeRelated=0, shuffle=shuffle
)
return PlexMediaSearchResult(playqueue, content)
return PlexMediaSearchResult(media, content) |
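# Hedged examples of content_id payloads this function accepts (values are
# illustrative only; PLEX_URI_SCHEME is the "plex://" prefix checked above).
import json

# JSON-style payload selecting media by search fields:
content_id_json = "plex://" + json.dumps(
    {"library_name": "Music", "artist_name": "Example Artist", "shuffle": 1}
)

# Media-browser style payload addressing item key 123 on a specific server
# ("abc123" stands in for a real server identifier):
content_id_browser = "plex://abc123/123"

# Either form would be resolved along the lines of:
# result = process_plex_payload(hass, "music", content_id_json)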
Return whether the media_content_id is a valid Plex media_id. | def is_plex_media_id(media_content_id):
"""Return whether the media_content_id is a valid Plex media_id."""
return media_content_id and media_content_id.startswith(PLEX_URI_SCHEME) |
Clean up old and invalid devices from the registry. | def async_cleanup_plex_devices(hass, entry):
"""Clean up old and invalid devices from the registry."""
device_registry = dr.async_get(hass)
entity_registry = er.async_get(hass)
device_entries = dr.async_entries_for_config_entry(device_registry, entry.entry_id)
for device_entry in device_entries:
if (
len(
er.async_entries_for_device(
entity_registry, device_entry.id, include_disabled_entities=True
)
)
== 0
):
_LOGGER.debug(
"Removing orphaned device: %s / %s",
device_entry.name,
device_entry.identifiers,
)
device_registry.async_remove_device(device_entry.id) |
Generate base schema for gateways. | def _base_gw_schema(discovery_info: ZeroconfServiceInfo | None) -> vol.Schema:
"""Generate base schema for gateways."""
base_gw_schema = vol.Schema({vol.Required(CONF_PASSWORD): str})
if not discovery_info:
base_gw_schema = base_gw_schema.extend(
{
vol.Required(CONF_HOST): str,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): int,
vol.Required(CONF_USERNAME, default=SMILE): vol.In(
{SMILE: FLOW_SMILE, STRETCH: FLOW_STRETCH}
),
}
)
return base_gw_schema |
Decorate Plugwise calls that send commands/make changes to the device.
A decorator that wraps the passed-in function, catches Plugwise errors,
and requests a coordinator update to refresh the device status as soon as possible. | def plugwise_command(
func: Callable[Concatenate[_PlugwiseEntityT, _P], Awaitable[_R]],
) -> Callable[Concatenate[_PlugwiseEntityT, _P], Coroutine[Any, Any, _R]]:
"""Decorate Plugwise calls that send commands/make changes to the device.
A decorator that wraps the passed-in function, catches Plugwise errors,
and requests a coordinator update to refresh the device status as soon as possible.
"""
async def handler(
self: _PlugwiseEntityT, *args: _P.args, **kwargs: _P.kwargs
) -> _R:
try:
return await func(self, *args, **kwargs)
except PlugwiseException as error:
raise HomeAssistantError(
f"Error communicating with API: {error}"
) from error
finally:
await self.coordinator.async_request_refresh()
return handler |
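# Illustrative sketch of applying the decorator above to an entity method.
# The class, coordinator attribute, and API call are placeholders, not the
# integration's actual classes or Plugwise library calls.
class ExamplePlugwiseSwitch:
    def __init__(self, coordinator, device_id):
        self.coordinator = coordinator
        self.device_id = device_id

    @plugwise_command
    async def async_turn_on(self, **kwargs):
        # Placeholder write; a PlugwiseException raised here becomes a
        # HomeAssistantError, and a coordinator refresh is always requested.
        await self.coordinator.api.set_switch_state(self.device_id, "on")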
Migrate Plugwise entity entries.
- Migrates old unique IDs of binary_sensors and switches to the new unique IDs | def async_migrate_entity_entry(entry: er.RegistryEntry) -> dict[str, Any] | None:
"""Migrate Plugwise entity entries.
- Migrates old unique IDs of binary_sensors and switches to the new unique IDs
"""
if entry.domain == Platform.BINARY_SENSOR and entry.unique_id.endswith(
"-slave_boiler_state"
):
return {
"new_unique_id": entry.unique_id.replace(
"-slave_boiler_state", "-secondary_boiler_state"
)
}
if entry.domain == Platform.SWITCH and entry.unique_id.endswith("-plug"):
return {"new_unique_id": entry.unique_id.replace("-plug", "-relay")}
# No migration needed
return None |
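# Sketch of how such a migration callback is typically wired up during config
# entry setup, using the entity-registry helper (the setup function name here
# is illustrative).
from homeassistant.helpers import entity_registry as er

async def async_setup_entry_example(hass, config_entry):
    await er.async_migrate_entries(
        hass, config_entry.entry_id, async_migrate_entity_entry
    )
    return True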
Migrate Sensors if needed. | def migrate_sensor_entities(
hass: HomeAssistant,
coordinator: PlugwiseDataUpdateCoordinator,
) -> None:
"""Migrate Sensors if needed."""
ent_reg = er.async_get(hass)
# Migrating opentherm_outdoor_temperature
# to opentherm_outdoor_air_temperature sensor
for device_id, device in coordinator.data.devices.items():
if device.get("dev_class") != "heater_central":
continue
old_unique_id = f"{device_id}-outdoor_temperature"
if entity_id := ent_reg.async_get_entity_id(
Platform.SENSOR, DOMAIN, old_unique_id
):
new_unique_id = f"{device_id}-outdoor_air_temperature"
LOGGER.debug(
"Migrating entity %s from old unique ID '%s' to new unique ID '%s'",
entity_id,
old_unique_id,
new_unique_id,
)
ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id) |
Set up the pocketcasts platform for sensors. | def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the pocketcasts platform for sensors."""
username = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
try:
api = pocketcasts.PocketCast(email=username, password=password)
_LOGGER.debug("Found %d podcasts", len(api.subscriptions))
add_entities([PocketCastsSensor(api)], True)
except OSError as err:
_LOGGER.error("Connection to server failed: %s", err) |
Register a flow implementation.
domain: Domain of the component responsible for the implementation.
client_id: Client id.
client_secret: Client secret. | def register_flow_implementation(hass, domain, client_id, client_secret):
"""Register a flow implementation.
domain: Domain of the component responsible for the implementation.
client_id: Client id.
client_secret: Client secret.
"""
if DATA_FLOW_IMPL not in hass.data:
hass.data[DATA_FLOW_IMPL] = OrderedDict()
hass.data[DATA_FLOW_IMPL][domain] = {
CONF_CLIENT_ID: client_id,
CONF_CLIENT_SECRET: client_secret,
} |
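# Illustrative call (credentials are placeholders): a component would register
# its OAuth client details like this before starting the config flow.
register_flow_implementation(
    hass, "point", client_id="YOUR_CLIENT_ID", client_secret="YOUR_CLIENT_SECRET"
)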