response
stringlengths 1
33.1k
| instruction
stringlengths 22
582k
|
---|---|
Remove the non-unique device registry entries. | def _async_remove_old_device_identifiers(
config_entry_id: str, device_registry: dr.DeviceRegistry, hub: BondHub
) -> None:
"""Remove the non-unique device registry entries."""
for device in hub.devices:
dev = device_registry.async_get_device(identifiers={(DOMAIN, device.device_id)})
if dev is None:
continue
if config_entry_id in dev.config_entries:
device_registry.async_remove_device(dev.id) |
Write the tls assets to disk. | def write_tls_asset(hass: HomeAssistant, filename: str, asset: bytes) -> None:
"""Write the tls assets to disk."""
makedirs(hass.config.path(DOMAIN), exist_ok=True)
with open(hass.config.path(DOMAIN, filename), "w", encoding="utf8") as file_handle:
file_handle.write(asset.decode("utf-8")) |
Create and store credentials and validate session. | def create_credentials_and_validate(
hass: HomeAssistant,
host: str,
user_input: dict[str, Any],
zeroconf_instance: zeroconf.HaZeroconf,
) -> dict[str, Any] | None:
"""Create and store credentials and validate session."""
helper = SHCRegisterClient(host, user_input[CONF_PASSWORD])
result = helper.register(host, "HomeAssistant")
if result is not None:
write_tls_asset(hass, CONF_SHC_CERT, result["cert"])
write_tls_asset(hass, CONF_SHC_KEY, result["key"])
session = SHCSession(
host,
hass.config.path(DOMAIN, CONF_SHC_CERT),
hass.config.path(DOMAIN, CONF_SHC_KEY),
True,
zeroconf_instance,
)
session.authenticate()
return result |
Get information from host. | def get_info_from_host(
hass: HomeAssistant, host: str, zeroconf_instance: zeroconf.HaZeroconf
) -> dict[str, str | None]:
"""Get information from host."""
session = SHCSession(
host,
"",
"",
True,
zeroconf_instance,
)
information = session.mdns_info()
return {"title": information.name, "unique_id": information.unique_id} |
Catch Bravia errors. | def catch_braviatv_errors(
func: Callable[Concatenate[_BraviaTVCoordinatorT, _P], Awaitable[None]],
) -> Callable[Concatenate[_BraviaTVCoordinatorT, _P], Coroutine[Any, Any, None]]:
"""Catch Bravia errors."""
@wraps(func)
async def wrapper(
self: _BraviaTVCoordinatorT,
*args: _P.args,
**kwargs: _P.kwargs,
) -> None:
"""Catch Bravia errors and log message."""
try:
await func(self, *args, **kwargs)
except BraviaError as err:
_LOGGER.error("Command error: %s", err)
await self.async_request_refresh()
return wrapper |
Return the domains available for a device type. | def get_domains(device_type: str) -> set[Platform]:
"""Return the domains available for a device type."""
return {d for d, t in DOMAINS_AND_TYPES.items() if device_type in t} |
Decode a data packet given for a Broadlink remote. | def data_packet(value):
"""Decode a data packet given for a Broadlink remote."""
value = cv.string(value)
extra = len(value) % 4
if extra > 0:
value = value + ("=" * (4 - extra))
return b64decode(value) |
Validate and convert a MAC address to bytes. | def mac_address(mac):
"""Validate and convert a MAC address to bytes."""
mac = cv.string(mac)
if len(mac) == 17:
mac = "".join(mac[i : i + 2] for i in range(0, 17, 3))
elif len(mac) == 14:
mac = "".join(mac[i : i + 4] for i in range(0, 14, 5))
elif len(mac) != 12:
raise ValueError("Invalid MAC address")
return bytes.fromhex(mac) |
Format a MAC address. | def format_mac(mac):
"""Format a MAC address."""
return ":".join([format(octet, "02x") for octet in mac]) |
Create a config flow for a device. | def import_device(hass, host):
"""Create a config flow for a device."""
configured_hosts = {
entry.data.get(CONF_HOST) for entry in hass.config_entries.async_entries(DOMAIN)
}
if host not in configured_hosts:
task = hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data={CONF_HOST: host},
)
hass.async_create_task(task) |
Return an update manager for a given Broadlink device. | def get_update_manager(device):
"""Return an update manager for a given Broadlink device."""
update_managers = {
"A1": BroadlinkA1UpdateManager,
"BG1": BroadlinkBG1UpdateManager,
"HYS": BroadlinkThermostatUpdateManager,
"LB1": BroadlinkLB1UpdateManager,
"LB2": BroadlinkLB1UpdateManager,
"MP1": BroadlinkMP1UpdateManager,
"MP1S": BroadlinkMP1SUpdateManager,
"RM4MINI": BroadlinkRMUpdateManager,
"RM4PRO": BroadlinkRMUpdateManager,
"RMMINI": BroadlinkRMUpdateManager,
"RMMINIB": BroadlinkRMUpdateManager,
"RMPRO": BroadlinkRMUpdateManager,
"SP1": BroadlinkSP1UpdateManager,
"SP2": BroadlinkSP2UpdateManager,
"SP2S": BroadlinkSP2UpdateManager,
"SP3": BroadlinkSP2UpdateManager,
"SP3S": BroadlinkSP2UpdateManager,
"SP4": BroadlinkSP4UpdateManager,
"SP4B": BroadlinkSP4UpdateManager,
}
return update_managers[device.api.type](device) |
Get SNMP engine. | def get_snmp_engine(hass: HomeAssistant) -> hlapi.SnmpEngine:
"""Get SNMP engine."""
_LOGGER.debug("Creating SNMP engine")
snmp_engine = hlapi.SnmpEngine()
@callback
def shutdown_listener(ev: Event) -> None:
if hass.data.get(DOMAIN):
_LOGGER.debug("Unconfiguring SNMP engine")
lcd.unconfigure(hass.data[DOMAIN][SNMP], None)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shutdown_listener)
return snmp_engine |
Browse to URL. | def _browser_url(service: ServiceCall) -> None:
"""Browse to URL."""
webbrowser.open(service.data[ATTR_URL]) |
Listen for browse_url events. | def setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Listen for browse_url events."""
hass.services.register(
DOMAIN,
SERVICE_BROWSE_URL,
_browser_url,
schema=SERVICE_BROWSE_URL_SCHEMA,
)
return True |
Convert a binary sensor update to a bluetooth data update. | def sensor_update_to_bluetooth_data_update(
sensor_update: SensorUpdate,
) -> PassiveBluetoothDataUpdate:
"""Convert a binary sensor update to a bluetooth data update."""
return PassiveBluetoothDataUpdate(
devices={
device_id: sensor_device_info_to_hass_device_info(device_info)
for device_id, device_info in sensor_update.devices.items()
},
entity_descriptions={
device_key_to_bluetooth_entity_key(device_key): BINARY_SENSOR_DESCRIPTIONS[
description.device_class
]
for device_key, description in sensor_update.binary_entity_descriptions.items()
if description.device_class
},
entity_data={
device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
for device_key, sensor_values in sensor_update.binary_entity_values.items()
},
entity_names={
device_key_to_bluetooth_entity_key(device_key): sensor_values.name
for device_key, sensor_values in sensor_update.binary_entity_values.items()
},
) |
Convert a device key to an entity key. | def device_key_to_bluetooth_entity_key(
device_key: DeviceKey,
) -> PassiveBluetoothEntityKey:
"""Convert a device key to an entity key."""
return PassiveBluetoothEntityKey(device_key.key, device_key.device_id) |
Describe logbook events. | def async_describe_events(
hass: HomeAssistant,
async_describe_event: Callable[
[str, str, Callable[[Event[BTHomeBleEvent]], dict[str, str]]], None
],
) -> None:
"""Describe logbook events."""
dr = async_get(hass)
@callback
def async_describe_bthome_event(event: Event[BTHomeBleEvent]) -> dict[str, str]:
"""Describe bthome logbook event."""
data = event.data
device = dr.async_get(data["device_id"])
name = device and device.name or f'BTHome {data["address"]}'
if properties := data["event_properties"]:
message = f"{data['event_class']} {data['event_type']}: {properties}"
else:
message = f"{data['event_class']} {data['event_type']}"
return {
LOGBOOK_ENTRY_NAME: name,
LOGBOOK_ENTRY_MESSAGE: message,
}
async_describe_event(DOMAIN, BTHOME_BLE_EVENT, async_describe_bthome_event) |
Convert a sensor update to a bluetooth data update. | def sensor_update_to_bluetooth_data_update(
sensor_update: SensorUpdate,
) -> PassiveBluetoothDataUpdate:
"""Convert a sensor update to a bluetooth data update."""
return PassiveBluetoothDataUpdate(
devices={
device_id: sensor_device_info_to_hass_device_info(device_info)
for device_id, device_info in sensor_update.devices.items()
},
entity_descriptions={
device_key_to_bluetooth_entity_key(device_key): SENSOR_DESCRIPTIONS[
(description.device_class, description.native_unit_of_measurement)
]
for device_key, description in sensor_update.entity_descriptions.items()
if description.device_class
},
entity_data={
device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
for device_key, sensor_values in sensor_update.entity_values.items()
},
entity_names={
device_key_to_bluetooth_entity_key(device_key): sensor_values.name
for device_key, sensor_values in sensor_update.entity_values.items()
},
) |
Process a BluetoothServiceInfoBleak, running side effects and returning sensor data. | def process_service_info(
hass: HomeAssistant,
entry: ConfigEntry,
data: BTHomeBluetoothDeviceData,
service_info: BluetoothServiceInfoBleak,
device_registry: DeviceRegistry,
) -> SensorUpdate:
"""Process a BluetoothServiceInfoBleak, running side effects and returning sensor data."""
update = data.update(service_info)
coordinator: BTHomePassiveBluetoothProcessorCoordinator = hass.data[DOMAIN][
entry.entry_id
]
discovered_event_classes = coordinator.discovered_event_classes
if entry.data.get(CONF_SLEEPY_DEVICE, False) != data.sleepy_device:
hass.config_entries.async_update_entry(
entry,
data=entry.data | {CONF_SLEEPY_DEVICE: data.sleepy_device},
)
if update.events:
address = service_info.device.address
for device_key, event in update.events.items():
sensor_device_info = update.devices[device_key.device_id]
device = device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
connections={(CONNECTION_BLUETOOTH, address)},
identifiers={(BLUETOOTH_DOMAIN, address)},
manufacturer=sensor_device_info.manufacturer,
model=sensor_device_info.model,
name=sensor_device_info.name,
sw_version=sensor_device_info.sw_version,
hw_version=sensor_device_info.hw_version,
)
# event_class may be postfixed with a number, ie 'button_2'
# but if there is only one button then it will be 'button'
event_class = event.device_key.key
event_type = event.event_type
ble_event = BTHomeBleEvent(
device_id=device.id,
address=address,
event_class=event_class, # ie 'button'
event_type=event_type, # ie 'press'
event_properties=event.event_properties,
)
if event_class not in discovered_event_classes:
discovered_event_classes.add(event_class)
hass.config_entries.async_update_entry(
entry,
data=entry.data
| {CONF_DISCOVERED_EVENT_CLASSES: list(discovered_event_classes)},
)
async_dispatcher_send(
hass, format_discovered_event_class(address), event_class, ble_event
)
hass.bus.async_fire(BTHOME_BLE_EVENT, ble_event)
async_dispatcher_send(
hass,
format_event_dispatcher_name(address, event_class),
ble_event,
)
# If payload is encrypted and the bindkey is not verified then we need to reauth
if data.encryption_scheme != EncryptionScheme.NONE and not data.bindkey_verified:
entry.async_start_reauth(hass, data={"device": data})
return update |
Format an event dispatcher name. | def format_event_dispatcher_name(
address: str, event_class: str
) -> SignalType[BTHomeBleEvent]:
"""Format an event dispatcher name."""
return SignalType(f"{DOMAIN}_event_{address}_{event_class}") |
Format a discovered event class. | def format_discovered_event_class(address: str) -> SignalType[str, BTHomeBleEvent]:
"""Format a discovered event class."""
return SignalType(f"{DOMAIN}_discovered_event_class_{address}") |
Return a BT Home Hub 5 scanner if successful. | def get_scanner(
hass: HomeAssistant, config: ConfigType
) -> BTHomeHub5DeviceScanner | None:
"""Return a BT Home Hub 5 scanner if successful."""
scanner = BTHomeHub5DeviceScanner(config[DOMAIN])
return scanner if scanner.success_init else None |
Return a BT Smart Hub scanner if successful. | def get_scanner(hass: HomeAssistant, config: ConfigType) -> BTSmartHubScanner | None:
"""Return a BT Smart Hub scanner if successful."""
info = config[DOMAIN]
smarthub_client = BTSmartHub(
router_ip=info[CONF_HOST], smarthub_model=info.get(CONF_SMARTHUB_MODEL)
)
scanner = BTSmartHubScanner(smarthub_client)
return scanner if scanner.success_init else None |
Create new device from the dict. | def _create_device(data):
"""Create new device from the dict."""
ip_address = data.get("IPAddress")
mac = data.get("PhysAddress")
host = data.get("UserHostName")
status = data.get("Active")
name = data.get("name")
return _Device(ip_address, mac, host, status, name) |
Log at warn level after WARN_THRESHOLD failures, debug otherwise. | def threshold_log(count: int, *args, **kwargs) -> None:
"""Log at warn level after WARN_THRESHOLD failures, debug otherwise."""
if count >= WARN_THRESHOLD:
_LOGGER.warning(*args, **kwargs)
else:
_LOGGER.debug(*args, **kwargs) |
Return the value of the CalDav object attribute if defined. | def get_attr_value(obj: caldav.CalendarObjectResource, attribute: str) -> str | None:
"""Return the value of the CalDav object attribute if defined."""
if hasattr(obj, attribute):
return getattr(obj, attribute).value
return None |
Convert a caldav Todo into a TodoItem. | def _todo_item(resource: caldav.CalendarObjectResource) -> TodoItem | None:
"""Convert a caldav Todo into a TodoItem."""
if (
not hasattr(resource.instance, "vtodo")
or not (todo := resource.instance.vtodo)
or (uid := get_attr_value(todo, "uid")) is None
or (summary := get_attr_value(todo, "summary")) is None
):
return None
due: date | datetime | None = None
if due_value := get_attr_value(todo, "due"):
if isinstance(due_value, datetime):
due = dt_util.as_local(due_value)
elif isinstance(due_value, date):
due = due_value
return TodoItem(
uid=uid,
summary=summary,
status=TODO_STATUS_MAP.get(
get_attr_value(todo, "status") or "",
TodoItemStatus.NEEDS_ACTION,
),
due=due,
description=get_attr_value(todo, "description"),
) |
Get the calendar entity for the provided entity_id. | def get_entity(hass: HomeAssistant, entity_id: str) -> CalendarEntity:
"""Get the calendar entity for the provided entity_id."""
component: EntityComponent[CalendarEntity] = hass.data[DOMAIN]
if not (entity := component.get_entity(entity_id)) or not isinstance(
entity, CalendarEntity
):
raise HomeAssistantError(
f"Entity does not exist {entity_id} or is not a calendar entity"
)
return entity |
Build an async_get_events wrapper to fetch events during a time span. | def event_fetcher(hass: HomeAssistant, entity_id: str) -> EventFetcher:
"""Build an async_get_events wrapper to fetch events during a time span."""
async def async_get_events(timespan: Timespan) -> list[CalendarEvent]:
"""Return events active in the specified time span."""
entity = get_entity(hass, entity_id)
# Expand by one second to make the end time exclusive
end_time = timespan.end + datetime.timedelta(seconds=1)
return await entity.async_get_events(hass, timespan.start, end_time)
return async_get_events |
Build a fetcher that produces a schedule of upcoming trigger events. | def queued_event_fetcher(
fetcher: EventFetcher, event_type: str, offset: datetime.timedelta
) -> QueuedEventFetcher:
"""Build a fetcher that produces a schedule of upcoming trigger events."""
def get_trigger_time(event: CalendarEvent) -> datetime.datetime:
if event_type == EVENT_START:
return event.start_datetime_local
return event.end_datetime_local
async def async_get_events(timespan: Timespan) -> list[QueuedCalendarEvent]:
"""Get calendar event triggers eligible to fire in the time span."""
offset_timespan = timespan.with_offset(-1 * offset)
active_events = await fetcher(offset_timespan)
# Determine the trigger eligibility of events during this time span.
# Example: For an EVENT_END trigger the event may start during this
# time span, but need to be triggered later when the end happens.
results = []
for trigger_time, event in zip(
map(get_trigger_time, active_events), active_events, strict=False
):
if trigger_time not in offset_timespan:
continue
results.append(QueuedCalendarEvent(trigger_time + offset, event))
_LOGGER.debug(
"Scan events @ %s%s found %s eligible of %s active",
offset_timespan,
f" (offset={offset})" if offset else "",
len(results),
len(active_events),
)
results.sort(key=lambda x: x.trigger_time)
return results
return async_get_events |
Assert that all datetime values have a timezone. | def _has_timezone(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Assert that all datetime values have a timezone."""
def validate(obj: dict[str, Any]) -> dict[str, Any]:
"""Validate that all datetime values have a timezone."""
for k in keys:
if (
(value := obj.get(k))
and isinstance(value, datetime.datetime)
and value.tzinfo is None
):
raise vol.Invalid("Expected all values to have a timezone")
return obj
return validate |
Verify that all datetime values have a consistent timezone. | def _has_consistent_timezone(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Verify that all datetime values have a consistent timezone."""
def validate(obj: dict[str, Any]) -> dict[str, Any]:
"""Test that all keys that are datetime values have the same timezone."""
tzinfos = []
for key in keys:
if not (value := obj.get(key)) or not isinstance(value, datetime.datetime):
return obj
tzinfos.append(value.tzinfo)
uniq_values = groupby(tzinfos)
if len(list(uniq_values)) > 1:
raise vol.Invalid("Expected all values to have the same timezone")
return obj
return validate |
Convert all datetime values to the local timezone. | def _as_local_timezone(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Convert all datetime values to the local timezone."""
def validate(obj: dict[str, Any]) -> dict[str, Any]:
"""Convert all keys that are datetime values to local timezone."""
for k in keys:
if (value := obj.get(k)) and isinstance(value, datetime.datetime):
obj[k] = dt_util.as_local(value)
return obj
return validate |
Verify that the time span between start and end has a minimum duration. | def _has_min_duration(
start_key: str, end_key: str, min_duration: datetime.timedelta
) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Verify that the time span between start and end has a minimum duration."""
def validate(obj: dict[str, Any]) -> dict[str, Any]:
if (start := obj.get(start_key)) and (end := obj.get(end_key)):
duration = end - start
if duration < min_duration:
raise vol.Invalid(
f"Expected minimum event duration of {min_duration} ({start}, {end})"
)
return obj
return validate |
Verify that all values are of the same type. | def _has_same_type(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
"""Verify that all values are of the same type."""
def validate(obj: dict[str, Any]) -> dict[str, Any]:
"""Test that all keys in the dict have values of the same type."""
uniq_values = groupby(type(obj[k]) for k in keys)
if len(list(uniq_values)) > 1:
raise vol.Invalid(f"Expected all values to be the same type: {keys}")
return obj
return validate |
Validate a recurrence rule string. | def _validate_rrule(value: Any) -> str:
"""Validate a recurrence rule string."""
if value is None:
raise vol.Invalid("rrule value is None")
if not isinstance(value, str):
raise vol.Invalid("rrule value expected a string")
try:
rrulestr(value)
except ValueError as err:
raise vol.Invalid(f"Invalid rrule '{value}': {err}") from err
# Example format: FREQ=DAILY;UNTIL=...
rule_parts = dict(s.split("=", 1) for s in value.split(";"))
if not (freq := rule_parts.get("FREQ")):
raise vol.Invalid("rrule did not contain FREQ")
if freq not in VALID_FREQS:
raise vol.Invalid(f"Invalid frequency for rule: {value}")
return str(value) |
Convert any empty string values to None. | def _empty_as_none(value: str | None) -> str | None:
"""Convert any empty string values to None."""
return value or None |
Get the dateTime from date or dateTime as a local. | def get_date(date: dict[str, Any]) -> datetime.datetime:
"""Get the dateTime from date or dateTime as a local."""
if "date" in date:
parsed_date = dt_util.parse_date(date["date"])
assert parsed_date
return dt_util.start_of_local_day(
datetime.datetime.combine(parsed_date, datetime.time.min)
)
parsed_datetime = dt_util.parse_datetime(date["dateTime"])
assert parsed_datetime
return dt_util.as_local(parsed_datetime) |
Convert CalendarEvent dataclass items to dictionary of attributes. | def _event_dict_factory(obj: Iterable[tuple[str, Any]]) -> dict[str, str]:
"""Convert CalendarEvent dataclass items to dictionary of attributes."""
result: dict[str, str] = {}
for name, value in obj:
if isinstance(value, (datetime.datetime, datetime.date)):
result[name] = value.isoformat()
elif value is not None:
result[name] = str(value)
return result |
Convert CalendarEvent dataclass items to the API format. | def _api_event_dict_factory(obj: Iterable[tuple[str, Any]]) -> dict[str, Any]:
"""Convert CalendarEvent dataclass items to the API format."""
result: dict[str, Any] = {}
for name, value in obj:
if isinstance(value, datetime.datetime):
result[name] = {"dateTime": dt_util.as_local(value).isoformat()}
elif isinstance(value, datetime.date):
result[name] = {"date": value.isoformat()}
else:
result[name] = value
return result |
Convert CalendarEvent dataclass items to dictionary of attributes. | def _list_events_dict_factory(
obj: Iterable[tuple[str, Any]],
) -> dict[str, JsonValueType]:
"""Convert CalendarEvent dataclass items to dictionary of attributes."""
return {
name: value
for name, value in _event_dict_factory(obj).items()
if name in LIST_EVENT_FIELDS and value is not None
} |
Convert a calendar event date/datetime to a datetime if needed. | def _get_datetime_local(
dt_or_d: datetime.datetime | datetime.date,
) -> datetime.datetime:
"""Convert a calendar event date/datetime to a datetime if needed."""
if isinstance(dt_or_d, datetime.datetime):
return dt_util.as_local(dt_or_d)
return dt_util.start_of_local_day(dt_or_d) |
Convert a calendar event date/datetime to a datetime if needed. | def _get_api_date(dt_or_d: datetime.datetime | datetime.date) -> dict[str, str]:
"""Convert a calendar event date/datetime to a datetime if needed."""
if isinstance(dt_or_d, datetime.datetime):
return {"dateTime": dt_util.as_local(dt_or_d).isoformat()}
return {"date": dt_or_d.isoformat()} |
Extract the offset from the event summary.
Return a tuple with the updated event summary and offset time. | def extract_offset(summary: str, offset_prefix: str) -> tuple[str, datetime.timedelta]:
"""Extract the offset from the event summary.
Return a tuple with the updated event summary and offset time.
"""
# check if we have an offset tag in the message
# time is HH:MM or MM
reg = f"{offset_prefix}([+-]?[0-9]{{0,2}}(:[0-9]{{0,2}})?)"
search = re.search(reg, summary)
if search and search.group(1):
time = search.group(1)
if ":" not in time:
if time[0] in ("+", "-"):
time = f"{time[0]}0:{time[1:]}"
else:
time = f"0:{time}"
offset_time = time_period_str(time)
summary = (summary[: search.start()] + summary[search.end() :]).strip()
return (summary, offset_time)
return (summary, datetime.timedelta()) |
Have we reached the offset time specified in the event title. | def is_offset_reached(
start: datetime.datetime, offset_time: datetime.timedelta
) -> bool:
"""Have we reached the offset time specified in the event title."""
if offset_time == datetime.timedelta():
return False
return start + offset_time <= dt_util.now(start.tzinfo) |
Parse a create event service call and convert the args ofr a create event entity call.
This converts the input service arguments into a `start` and `end` date or date time. This
exists because service calls use `start_date` and `start_date_time` whereas the
normal entity methods can take either a `datetime` or `date` as a single `start` argument.
It also handles the other service call variations like "in days" as well. | def _validate_timespan(
values: dict[str, Any],
) -> tuple[datetime.datetime | datetime.date, datetime.datetime | datetime.date]:
"""Parse a create event service call and convert the args ofr a create event entity call.
This converts the input service arguments into a `start` and `end` date or date time. This
exists because service calls use `start_date` and `start_date_time` whereas the
normal entity methods can take either a `datetime` or `date` as a single `start` argument.
It also handles the other service call variations like "in days" as well.
"""
if event_in := values.get(EVENT_IN):
days = event_in.get(EVENT_IN_DAYS, 7 * event_in.get(EVENT_IN_WEEKS, 0))
today = datetime.date.today()
return (
today + datetime.timedelta(days=days),
today + datetime.timedelta(days=days + 1),
)
if EVENT_START_DATE in values and EVENT_END_DATE in values:
return (values[EVENT_START_DATE], values[EVENT_END_DATE])
if EVENT_START_DATETIME in values and EVENT_END_DATETIME in values:
return (values[EVENT_START_DATETIME], values[EVENT_END_DATETIME])
raise ValueError("Missing required fields to set start or end date/datetime") |
Find a supported scaling factor to scale the image.
If there is no exact match, we use one size up to ensure
the image remains crisp. | def find_supported_scaling_factor(
current_width: int, current_height: int, target_width: int, target_height: int
) -> tuple[int, int] | None:
"""Find a supported scaling factor to scale the image.
If there is no exact match, we use one size up to ensure
the image remains crisp.
"""
for idx, supported_sf in enumerate(SUPPORTED_SCALING_FACTORS):
ratio = supported_sf[0] / supported_sf[1]
width_after_scale = current_width * ratio
height_after_scale = current_height * ratio
if width_after_scale == target_width and height_after_scale == target_height:
return supported_sf
if width_after_scale < target_width or height_after_scale < target_height:
return None if idx == 0 else SUPPORTED_SCALING_FACTORS[idx - 1]
# Giant image, the most we can reduce by is 1/8
return SUPPORTED_SCALING_FACTORS[-1] |
Scale a camera image.
Scale as close as possible to one of the supported scaling factors. | def scale_jpeg_camera_image(cam_image: Image, width: int, height: int) -> bytes:
"""Scale a camera image.
Scale as close as possible to one of the supported scaling factors.
"""
turbo_jpeg = TurboJPEGSingleton.instance()
if not turbo_jpeg:
return cam_image.content
try:
(current_width, current_height, _, _) = turbo_jpeg.decode_header(
cam_image.content
)
except OSError:
return cam_image.content
scaling_factor = find_supported_scaling_factor(
current_width, current_height, width, height
)
if scaling_factor is None:
return cam_image.content
return cast(
bytes,
turbo_jpeg.scale_with_quality(
cam_image.content,
scaling_factor=scaling_factor,
quality=JPEG_QUALITY,
),
) |
Test if state significantly changed. | def async_check_significant_change(
hass: HomeAssistant,
old_state: str,
old_attrs: dict,
new_state: str,
new_attrs: dict,
**kwargs: Any,
) -> bool | None:
"""Test if state significantly changed."""
if old_state != new_state:
return True
return None |
Get camera component from entity_id. | def _get_camera_from_entity_id(hass: HomeAssistant, entity_id: str) -> Camera:
"""Get camera component from entity_id."""
if (component := hass.data.get(DOMAIN)) is None:
raise HomeAssistantError("Camera integration not set up")
if (camera := component.get_entity(entity_id)) is None:
raise HomeAssistantError("Camera not found")
if not camera.is_on:
raise HomeAssistantError("Camera is off")
return cast(Camera, camera) |
Register an RTSP to WebRTC provider.
The first provider to satisfy the offer will be used. | def async_register_rtsp_to_web_rtc_provider(
hass: HomeAssistant,
domain: str,
provider: RtspToWebRtcProviderType,
) -> Callable[[], None]:
"""Register an RTSP to WebRTC provider.
The first provider to satisfy the offer will be used.
"""
if DOMAIN not in hass.data:
raise ValueError("Unexpected state, camera not loaded")
def remove_provider() -> None:
if domain in hass.data[DATA_RTSP_TO_WEB_RTC]:
del hass.data[DATA_RTSP_TO_WEB_RTC]
hass.async_create_task(_async_refresh_providers(hass))
hass.data.setdefault(DATA_RTSP_TO_WEB_RTC, {})
hass.data[DATA_RTSP_TO_WEB_RTC][domain] = provider
hass.async_create_task(_async_refresh_providers(hass))
return remove_provider |
Return registered RTSP to WebRTC providers. | def _async_get_rtsp_to_web_rtc_providers(
hass: HomeAssistant,
) -> Iterable[RtspToWebRtcProviderType]:
"""Return registered RTSP to WebRTC providers."""
providers: dict[str, RtspToWebRtcProviderType] = hass.data.get(
DATA_RTSP_TO_WEB_RTC, {}
)
return providers.values() |
Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user. | def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> bool:
"""Validate the user input allows us to connect.
Data has the keys from DATA_SCHEMA with values provided by the user.
"""
# constructor does login call
Api(
data[CONF_USERNAME],
data[CONF_PASSWORD],
data.get(CONF_TIMEOUT, DEFAULT_TIMEOUT),
)
return True |
Initialize a new instance of CanaryApi. | def _get_canary_api_instance(entry: ConfigEntry) -> Api:
"""Initialize a new instance of CanaryApi."""
return Api(
entry.data[CONF_USERNAME],
entry.data[CONF_PASSWORD],
entry.options.get(CONF_TIMEOUT, DEFAULT_TIMEOUT),
) |
Discover a Chromecast. | def discover_chromecast(
hass: HomeAssistant, cast_info: pychromecast.models.CastInfo
) -> None:
"""Discover a Chromecast."""
info = ChromecastInfo(
cast_info=cast_info,
)
if info.uuid is None:
_LOGGER.error("Discovered chromecast without uuid %s", info)
return
info = info.fill_out_missing_chromecast_info(hass)
_LOGGER.debug("Discovered new or updated chromecast %s", info)
dispatcher_send(hass, SIGNAL_CAST_DISCOVERED, info) |
Set up the pychromecast internal discovery. | def setup_internal_discovery(hass: HomeAssistant, config_entry: ConfigEntry) -> None:
"""Set up the pychromecast internal discovery."""
if INTERNAL_DISCOVERY_RUNNING_KEY not in hass.data:
hass.data[INTERNAL_DISCOVERY_RUNNING_KEY] = threading.Lock()
if not hass.data[INTERNAL_DISCOVERY_RUNNING_KEY].acquire(blocking=False):
# Internal discovery is already running
return
class CastListener(pychromecast.discovery.AbstractCastListener):
"""Listener for discovering chromecasts."""
def add_cast(self, uuid, _):
"""Handle zeroconf discovery of a new chromecast."""
discover_chromecast(hass, browser.devices[uuid])
def update_cast(self, uuid, _):
"""Handle zeroconf discovery of an updated chromecast."""
discover_chromecast(hass, browser.devices[uuid])
def remove_cast(self, uuid, service, cast_info):
"""Handle zeroconf discovery of a removed chromecast."""
_remove_chromecast(
hass,
ChromecastInfo(
cast_info=cast_info,
),
)
_LOGGER.debug("Starting internal pychromecast discovery")
browser = pychromecast.discovery.CastBrowser(
CastListener(),
ChromeCastZeroconf.get_zeroconf(),
config_entry.data.get(CONF_KNOWN_HOSTS),
)
hass.data[CAST_BROWSER_KEY] = browser
browser.start_discovery()
def stop_discovery(event):
"""Stop discovery of new chromecasts."""
_LOGGER.debug("Stopping internal pychromecast discovery")
browser.stop_discovery()
hass.data[INTERNAL_DISCOVERY_RUNNING_KEY].release()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_discovery)
config_entry.add_update_listener(config_entry_updated) |
Validate the URL can be parsed and at least has scheme + netloc. | def _is_url(url):
"""Validate the URL can be parsed and at least has scheme + netloc."""
result = urlparse(url)
return all([result.scheme, result.netloc]) |
def api_error(
    func: _FuncType[_CastDeviceT, _P, _R],
) -> _ReturnFuncType[_CastDeviceT, _P, _R]:
    """Handle PyChromecastError and reraise a HomeAssistantError.

    Decorator for CastDevice methods: any PyChromecastError raised by the
    wrapped call is converted into a HomeAssistantError that names the
    class and method for easier debugging.
    """

    @wraps(func)
    def wrapper(self: _CastDeviceT, *args: _P.args, **kwargs: _P.kwargs) -> _R:
        """Wrap a CastDevice method."""
        try:
            return_value = func(self, *args, **kwargs)
        except PyChromecastError as err:
            raise HomeAssistantError(
                f"{self.__class__.__name__}.{func.__name__} Failed: {err}"
            ) from err
        return return_value

    return wrapper
def _async_create_cast_device(hass: HomeAssistant, info: ChromecastInfo):
    """Create a CastDevice entity or dynamic group from the chromecast object.

    Returns None if the cast device has already been added.
    """
    _LOGGER.debug("_async_create_cast_device: %s", info)
    if info.uuid is None:
        _LOGGER.error("_async_create_cast_device uuid none: %s", info)
        return None

    # Found a cast with UUID
    added_casts = hass.data[ADDED_CAST_DEVICES_KEY]
    if info.uuid in added_casts:
        # Already added this one, the entity will take care of moved hosts
        # itself
        return None
    # -> New cast device
    added_casts.add(info.uuid)

    if info.is_dynamic_group:
        # This is a dynamic group, do not add it but connect to the service.
        group = DynamicCastGroup(hass, info)
        group.async_setup()
        return None

    return CastMediaPlayerEntity(hass, info)
def _register_cast_platform(
    hass: HomeAssistant, integration_domain: str, platform: CastProtocol
):
    """Register a cast platform.

    Validates that the platform object implements the full cast protocol
    before storing it under its integration domain.
    """
    required_methods = (
        "async_get_media_browser_root_object",
        "async_browse_media",
        "async_play_media",
    )
    if not all(hasattr(platform, method) for method in required_methods):
        raise HomeAssistantError(f"Invalid cast platform {platform}")
    hass.data[DOMAIN]["cast_platform"][integration_domain] = platform
def get_service(
    hass: HomeAssistant,
    config: ConfigType,
    discovery_info: DiscoveryInfoType | None = None,
) -> CircuitNotificationService | None:
    """Get the Unify Circuit notification service."""
    # This platform is only created via discovery; without discovery data
    # there is nothing to configure.
    if discovery_info is None:
        return None

    return CircuitNotificationService(discovery_info)
def get_scanner(hass: HomeAssistant, config: ConfigType) -> CiscoDeviceScanner | None:
    """Validate the configuration and return a Cisco scanner."""
    scanner = CiscoDeviceScanner(config[DOMAIN])

    # Only expose the scanner if its initial connection succeeded.
    return scanner if scanner.success_init else None
Parse a Cisco formatted HW address to normal MAC.
e.g. convert
001d.ec02.07ab
to:
00:1D:EC:02:07:AB
Takes in cisco_hwaddr: HWAddr String from Cisco ARP table
Returns a regular standard MAC address | def _parse_cisco_mac_address(cisco_hardware_addr):
"""Parse a Cisco formatted HW address to normal MAC.
e.g. convert
001d.ec02.07ab
to:
00:1D:EC:02:07:AB
Takes in cisco_hwaddr: HWAddr String from Cisco ARP table
Returns a regular standard MAC address
"""
cisco_hardware_addr = cisco_hardware_addr.replace(".", "")
blocks = [
cisco_hardware_addr[x : x + 2] for x in range(0, len(cisco_hardware_addr), 2)
]
return ":".join(blocks).upper() |
def get_scanner(hass: HomeAssistant, config: ConfigType) -> CiscoMEDeviceScanner | None:
    """Validate the configuration and return a Cisco ME scanner."""
    config = config[DOMAIN]

    controller = CiscoMobilityExpress(
        config[CONF_HOST],
        config[CONF_USERNAME],
        config[CONF_PASSWORD],
        config[CONF_SSL],
        config[CONF_VERIFY_SSL],
    )
    # A failed login means the controller is unreachable or the
    # credentials are wrong; report "no scanner" in that case.
    if not controller.is_logged_in():
        return None
    return CiscoMEDeviceScanner(controller)
def get_service(
    hass: HomeAssistant,
    config: ConfigType,
    discovery_info: DiscoveryInfoType | None = None,
) -> CiscoWebexTeamsNotificationService | None:
    """Get the CiscoWebexTeams notification service."""
    client = WebexTeamsAPI(access_token=config[CONF_TOKEN])
    try:
        # Validate the token & room_id
        client.rooms.get(config[CONF_ROOM_ID])
    except exceptions.ApiError as error:
        _LOGGER.error(error)
        return None

    return CiscoWebexTeamsNotificationService(client, config[CONF_ROOM_ID])
def setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Clementine platform."""
    host = config[CONF_HOST]
    port = config[CONF_PORT]
    # Access token is optional in the YAML schema.
    token = config.get(CONF_ACCESS_TOKEN)

    client = ClementineRemote(host, port, token, reconnect=True)

    add_entities([ClementineDevice(client, config[CONF_NAME])])
def get_service(
    hass: HomeAssistant,
    config: ConfigType,
    discovery_info: DiscoveryInfoType | None = None,
) -> ClickatellNotificationService:
    """Get the Clickatell notification service."""
    return ClickatellNotificationService(config)
def get_service(
    hass: HomeAssistant,
    config: ConfigType,
    discovery_info: DiscoveryInfoType | None = None,
) -> ClicksendNotificationService | None:
    """Get the ClickSend notification service."""
    # Verify the credentials up-front so setup fails fast.
    if not _authenticate(config):
        _LOGGER.error("You are not authorized to access ClickSend")
        return None
    return ClicksendNotificationService(config)
def _authenticate(config: ConfigType) -> bool:
    """Authenticate with ClickSend."""
    api_url = f"{BASE_API_URL}/account"
    resp = requests.get(
        api_url,
        headers=HEADERS,
        auth=(config[CONF_USERNAME], config[CONF_API_KEY]),
        timeout=TIMEOUT,
    )
    # Any non-200 response (401/403/...) is treated as "not authorized".
    return resp.status_code == HTTPStatus.OK
def get_service(
    hass: HomeAssistant,
    config: ConfigType,
    discovery_info: DiscoveryInfoType | None = None,
) -> ClicksendNotificationService | None:
    """Get the ClickSend notification service."""
    # Verify the credentials up-front so setup fails fast.
    if not _authenticate(config):
        _LOGGER.error("You are not authorized to access ClickSend")
        return None
    return ClicksendNotificationService(config)
def _authenticate(config):
    """Authenticate with ClickSend."""
    api_url = f"{BASE_API_URL}/account"
    resp = requests.get(
        api_url,
        headers=HEADERS,
        auth=(config.get(CONF_USERNAME), config.get(CONF_API_KEY)),
        timeout=TIMEOUT,
    )
    # Any non-200 response (401/403/...) is treated as "not authorized".
    return resp.status_code == HTTPStatus.OK
def async_condition_from_config(
    hass: HomeAssistant, config: ConfigType
) -> condition.ConditionCheckerType:
    """Create a function to test a device condition."""
    registry = er.async_get(hass)
    # Resolve once at setup time; the config may reference a registry id.
    entity_id = er.async_resolve_entity_id(registry, config[ATTR_ENTITY_ID])

    def test_is_state(hass: HomeAssistant, variables: TemplateVarsType) -> bool:
        """Test if an entity is a certain state."""
        if not entity_id or (state := hass.states.get(entity_id)) is None:
            return False
        if config[CONF_TYPE] == "is_hvac_mode":
            return bool(state.state == config[const.ATTR_HVAC_MODE])
        # Otherwise the condition checks the preset mode attribute.
        return bool(
            state.attributes.get(const.ATTR_PRESET_MODE)
            == config[const.ATTR_PRESET_MODE]
        )

    return test_is_state
def async_describe_on_off_states(
    hass: HomeAssistant, registry: "GroupIntegrationRegistry"
) -> None:
    """Describe group on off states."""
    # Any active HVAC mode counts as "on" for group state aggregation.
    registry.on_off_states(
        DOMAIN,
        {
            STATE_ON,
            HVACMode.HEAT,
            HVACMode.COOL,
            HVACMode.HEAT_COOL,
            HVACMode.AUTO,
            HVACMode.FAN_ONLY,
        },
        STATE_ON,
        STATE_OFF,
    )
def async_check_significant_change(
    hass: HomeAssistant,
    old_state: str,
    old_attrs: dict,
    new_state: str,
    new_attrs: dict,
    **kwargs: Any,
) -> bool | None:
    """Test if state significantly changed.

    A change of the main state, of any mode-like attribute, or a large
    enough numeric delta in temperature/humidity attributes counts as
    significant.
    """
    if old_state != new_state:
        return True

    # Diff only the attributes we consider significant; symmetric
    # difference of the (key, value) pairs yields changed keys.
    old_attrs_s = set(
        {k: v for k, v in old_attrs.items() if k in SIGNIFICANT_ATTRIBUTES}.items()
    )
    new_attrs_s = set(
        {k: v for k, v in new_attrs.items() if k in SIGNIFICANT_ATTRIBUTES}.items()
    )
    changed_attrs: set[str] = {item[0] for item in old_attrs_s ^ new_attrs_s}
    ha_unit = hass.config.units.temperature_unit

    for attr_name in changed_attrs:
        # Mode-like attributes are significant on any change.
        if attr_name in [
            ATTR_AUX_HEAT,
            ATTR_FAN_MODE,
            ATTR_HVAC_ACTION,
            ATTR_PRESET_MODE,
            ATTR_SWING_MODE,
        ]:
            return True

        old_attr_value = old_attrs.get(attr_name)
        new_attr_value = new_attrs.get(attr_name)
        if new_attr_value is None or not check_valid_float(new_attr_value):
            # New attribute value is invalid, ignore it
            continue

        if old_attr_value is None or not check_valid_float(old_attr_value):
            # Old attribute value was invalid, we should report again
            return True

        absolute_change: float | None = None
        if attr_name in [
            ATTR_CURRENT_TEMPERATURE,
            ATTR_TARGET_TEMP_HIGH,
            ATTR_TARGET_TEMP_LOW,
            ATTR_TEMPERATURE,
        ]:
            # Coarser threshold for Fahrenheit so both unit systems behave
            # comparably.
            if ha_unit == UnitOfTemperature.FAHRENHEIT:
                absolute_change = 1.0
            else:
                absolute_change = 0.5

        if attr_name in [ATTR_CURRENT_HUMIDITY, ATTR_HUMIDITY]:
            absolute_change = 1.0

        if absolute_change and check_absolute_change(
            old_attr_value, new_attr_value, absolute_change
        ):
            return True

    # no significant attribute change detected
    return False
def async_setup(hass: HomeAssistant) -> None:
    """Set up cloud account link."""
    config_entry_oauth2_flow.async_add_implementation_provider(
        hass, DOMAIN, async_provide_implementation
    )
def entity_supported(hass: HomeAssistant, entity_id: str) -> bool:
    """Return if the entity is supported.

    This is called when migrating from legacy config format to avoid exposing
    all binary sensors and sensors.
    """
    domain = split_entity_id(entity_id)[0]
    if domain in SUPPORTED_DOMAINS:
        return True

    try:
        device_class = get_device_class(hass, entity_id)
    except HomeAssistantError:
        # The entity no longer exists
        return False
    # Binary sensors and sensors are only supported for a subset of
    # device classes.
    if (
        domain == "binary_sensor"
        and device_class in SUPPORTED_BINARY_SENSOR_DEVICE_CLASSES
    ):
        return True

    if domain == "sensor" and device_class in SUPPORTED_SENSOR_DEVICE_CLASSES:
        return True

    return False
def _supported_legacy(hass: HomeAssistant, entity_id: str) -> bool:
    """Return if the entity is supported.

    This is called when migrating from legacy config format to avoid exposing
    all binary sensors and sensors.
    """
    domain = split_entity_id(entity_id)[0]
    if domain in SUPPORTED_DOMAINS:
        return True

    try:
        device_class = get_device_class(hass, entity_id)
    except HomeAssistantError:
        # The entity no longer exists
        return False
    # Binary sensors and sensors are only supported for a subset of
    # device classes.
    if (
        domain == "binary_sensor"
        and device_class in SUPPORTED_BINARY_SENSOR_DEVICE_CLASSES
    ):
        return True

    if domain == "sensor" and device_class in SUPPORTED_SENSOR_DEVICE_CLASSES:
        return True

    return False
def async_setup(hass: HomeAssistant) -> None:
    """Initialize the HTTP API.

    Registers every cloud websocket command and HTTP view, then extends
    the shared cloud-error mapping with auth-specific errors.
    """
    websocket_api.async_register_command(hass, websocket_cloud_remove_data)
    websocket_api.async_register_command(hass, websocket_cloud_status)
    websocket_api.async_register_command(hass, websocket_subscription)
    websocket_api.async_register_command(hass, websocket_update_prefs)
    websocket_api.async_register_command(hass, websocket_hook_create)
    websocket_api.async_register_command(hass, websocket_hook_delete)
    websocket_api.async_register_command(hass, websocket_remote_connect)
    websocket_api.async_register_command(hass, websocket_remote_disconnect)
    websocket_api.async_register_command(hass, google_assistant_get)
    websocket_api.async_register_command(hass, google_assistant_list)
    websocket_api.async_register_command(hass, google_assistant_update)
    websocket_api.async_register_command(hass, alexa_get)
    websocket_api.async_register_command(hass, alexa_list)
    websocket_api.async_register_command(hass, alexa_sync)
    websocket_api.async_register_command(hass, thingtalk_convert)
    websocket_api.async_register_command(hass, tts_info)

    hass.http.register_view(GoogleActionsSyncView)
    hass.http.register_view(CloudLoginView)
    hass.http.register_view(CloudLogoutView)
    hass.http.register_view(CloudRegisterView)
    hass.http.register_view(CloudResendConfirmView)
    hass.http.register_view(CloudForgotPasswordView)

    # Map auth library exceptions to (HTTP status, user-facing message).
    _CLOUD_ERRORS.update(
        {
            auth.UserNotFound: (HTTPStatus.BAD_REQUEST, "User does not exist."),
            auth.UserNotConfirmed: (HTTPStatus.BAD_REQUEST, "Email not confirmed."),
            auth.UserExists: (
                HTTPStatus.BAD_REQUEST,
                "An account with the given email already exists.",
            ),
            auth.Unauthenticated: (HTTPStatus.UNAUTHORIZED, "Authentication failed."),
            auth.PasswordChangeRequired: (
                HTTPStatus.BAD_REQUEST,
                "Password change required.",
            ),
        }
    )
def _handle_cloud_errors(
    handler: Callable[
        Concatenate[_HassViewT, web.Request, _P], Awaitable[web.Response]
    ],
) -> Callable[
    Concatenate[_HassViewT, web.Request, _P], Coroutine[Any, Any, web.Response]
]:
    """Webview decorator to handle auth errors.

    Converts any exception from the wrapped handler into a JSON error
    response using the shared cloud error mapping.
    """

    @wraps(handler)
    async def error_handler(
        view: _HassViewT, request: web.Request, *args: _P.args, **kwargs: _P.kwargs
    ) -> web.Response:
        """Handle exceptions that raise from the wrapped request handler."""
        try:
            result = await handler(view, request, *args, **kwargs)
        # Broad on purpose: this is the HTTP boundary, every failure must
        # become a well-formed error response.
        except Exception as err:  # pylint: disable=broad-except
            status, msg = _process_cloud_exception(err, request.path)
            return view.json_message(
                msg, status_code=status, message_code=err.__class__.__name__.lower()
            )

        return result

    return error_handler
def _ws_handle_cloud_errors(
    handler: Callable[
        [HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]],
        Coroutine[None, None, None],
    ],
) -> Callable[
    [HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]],
    Coroutine[None, None, None],
]:
    """Websocket decorator to handle auth errors.

    Converts any exception from the wrapped handler into a websocket
    error message using the shared cloud error mapping.
    """

    @wraps(handler)
    async def error_handler(
        hass: HomeAssistant,
        connection: websocket_api.ActiveConnection,
        msg: dict[str, Any],
    ) -> None:
        """Handle exceptions that raise from the wrapped handler."""
        try:
            return await handler(hass, connection, msg)
        # Broad on purpose: this is the websocket boundary, every failure
        # must become a well-formed error message.
        except Exception as err:  # pylint: disable=broad-except
            err_status, err_msg = _process_cloud_exception(err, msg["type"])
            connection.send_error(msg["id"], str(err_status), err_msg)

    return error_handler
def _process_cloud_exception(exc: Exception, where: str) -> tuple[HTTPStatus, str]:
    """Process a cloud exception.

    Map a known exception type to its registered (status, message) pair;
    unknown exceptions are logged and reported as a bad gateway.
    """
    err_info: tuple[HTTPStatus, str] | None = next(
        (info for err_cls, info in _CLOUD_ERRORS.items() if isinstance(exc, err_cls)),
        None,
    )

    if err_info is None:
        _LOGGER.exception("Unexpected error processing request for %s", where)
        err_info = (HTTPStatus.BAD_GATEWAY, f"Unexpected error: {exc}")

    return err_info
def _require_cloud_login(
    handler: Callable[
        [HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]],
        None,
    ],
) -> Callable[
    [HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]],
    None,
]:
    """Websocket decorator that requires cloud to be logged in."""

    @wraps(handler)
    def with_cloud_auth(
        hass: HomeAssistant,
        connection: websocket_api.ActiveConnection,
        msg: dict[str, Any],
    ) -> None:
        """Require to be logged into the cloud."""
        cloud: Cloud[CloudClient] = hass.data[DOMAIN]
        if not cloud.is_logged_in:
            # Reject the command instead of calling the handler.
            connection.send_message(
                websocket_api.error_message(
                    msg["id"], "not_logged_in", "You need to be logged in to the cloud."
                )
            )
            return

        handler(hass, connection, msg)

    return with_cloud_auth
def validate_language_voice(value: tuple[str, str]) -> tuple[str, str]:
    """Validate language and voice.

    Raises vol.Invalid when the language is unknown or the voice is not
    available for that language.
    """
    language, voice = value
    if language not in TTS_VOICES:
        raise vol.Invalid(f"Invalid language {language}")
    if voice not in TTS_VOICES[language]:
        raise vol.Invalid(f"Invalid voice {voice} for language {language}")
    return value
def tts_info(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Fetch available tts info."""
    # Flatten the language -> voices mapping into (language, voice) pairs.
    connection.send_result(
        msg["id"],
        {
            "languages": [
                (language, voice)
                for language, voices in TTS_VOICES.items()
                for voice in voices
            ]
        },
    )
def async_manage_legacy_subscription_issue(
    hass: HomeAssistant,
    subscription_info: dict[str, Any],
) -> None:
    """Manage the legacy subscription issue.

    If the provider is "legacy" create an issue,
    in all other cases remove the issue.
    """
    if subscription_info.get("provider") == "legacy":
        ir.async_create_issue(
            hass=hass,
            domain=DOMAIN,
            issue_id="legacy_subscription",
            is_fixable=True,
            severity=ir.IssueSeverity.WARNING,
            translation_key="legacy_subscription",
        )
        return
    # Deleting a non-existing issue is a no-op, so this is always safe.
    ir.async_delete_issue(hass=hass, domain=DOMAIN, issue_id="legacy_subscription")
def async_register(
    hass: HomeAssistant, register: system_health.SystemHealthRegistration
) -> None:
    """Register system health callbacks."""
    register.async_register_info(system_health_info, "/config/cloud")
def _deprecated_platform(value: str) -> str:
    """Validate if platform is deprecated.

    Logs a warning and raises a repairs issue when the deprecated cloud
    tts platform config is used; the value itself is passed through.
    """
    if value == DOMAIN:
        _LOGGER.warning(
            "The cloud tts platform configuration is deprecated, "
            "please remove it from your configuration "
            "and use the UI to change settings instead"
        )
        hass = async_get_hass()
        async_create_issue(
            hass,
            DOMAIN,
            "deprecated_tts_platform_config",
            breaks_in_ha_version="2024.9.0",
            is_fixable=False,
            severity=IssueSeverity.WARNING,
            translation_key="deprecated_tts_platform_config",
        )
    return value
def validate_lang(value: dict[str, Any]) -> dict[str, Any]:
    """Validate chosen gender or language."""
    if (lang := value.get(CONF_LANG)) is None:
        return value

    if (gender := value.get(ATTR_GENDER)) is None:
        # Default to the first gender registered for this language.
        gender = value[ATTR_GENDER] = next(
            (chk_gender for chk_lang, chk_gender in MAP_VOICE if chk_lang == lang), None
        )

    if (lang, gender) not in MAP_VOICE:
        raise vol.Invalid("Unsupported language and gender specified.")

    return value
def handle_deprecated_gender(
    hass: HomeAssistant,
    gender: Gender | str | None,
) -> Gender | None:
    """Handle deprecated gender.

    Any explicit gender option triggers a repairs issue; the value is
    coerced to the Gender enum.
    """
    if gender is None:
        return None

    async_create_issue(
        hass,
        DOMAIN,
        "deprecated_gender",
        is_fixable=True,
        is_persistent=True,
        severity=IssueSeverity.WARNING,
        breaks_in_ha_version="2024.10.0",
        translation_key="deprecated_gender",
        translation_placeholders={
            "integration_name": "Home Assistant Cloud",
            "deprecated_option": "gender",
            "replacement_option": "voice",
        },
    )
    return Gender(gender)
def handle_deprecated_voice(
    hass: HomeAssistant,
    original_voice: str | None,
) -> str | None:
    """Handle deprecated voice.

    Maps a deprecated voice name to its replacement and raises a repairs
    issue when a mapping occurred; other values pass through unchanged.
    """
    if not original_voice:
        return original_voice

    voice = DEPRECATED_VOICES.get(original_voice, original_voice)
    if voice != original_voice:
        async_create_issue(
            hass,
            DOMAIN,
            f"deprecated_voice_{original_voice}",
            is_fixable=True,
            is_persistent=True,
            severity=IssueSeverity.WARNING,
            breaks_in_ha_version="2024.8.0",
            translation_key="deprecated_voice",
            translation_placeholders={
                "deprecated_voice": original_voice,
                "replacement_voice": voice,
            },
        )
    return voice
def get_strict_connection_mode(hass: HomeAssistant) -> http.const.StrictConnectionMode:
    """Get the strict connection mode."""
    cloud: Cloud[CloudClient] = hass.data[DOMAIN]
    return cloud.client.prefs.strict_connection
def async_is_logged_in(hass: HomeAssistant) -> bool:
    """Test if user is logged in.

    Note: This returns True even if not currently connected to the cloud.
    """
    return DOMAIN in hass.data and hass.data[DOMAIN].is_logged_in
def async_is_connected(hass: HomeAssistant) -> bool:
    """Test if connected to the cloud."""
    return DOMAIN in hass.data and hass.data[DOMAIN].iot.connected
def async_listen_connection_change(
    hass: HomeAssistant,
    target: Callable[[CloudConnectionState], Awaitable[None] | None],
) -> Callable[[], None]:
    """Notify on connection state changes.

    Returns the dispatcher's unsubscribe callback.
    """
    return async_dispatcher_connect(hass, SIGNAL_CLOUD_CONNECTION_STATE, target)
def async_active_subscription(hass: HomeAssistant) -> bool:
    """Test if user has an active subscription."""
    return async_is_logged_in(hass) and not hass.data[DOMAIN].subscription_expired
def async_remote_ui_url(hass: HomeAssistant) -> str:
    """Get the remote UI URL.

    Raises CloudNotAvailable when not logged in, remote access is
    disabled, or no remote domain has been assigned yet.
    """
    if not async_is_logged_in(hass):
        raise CloudNotAvailable

    if not hass.data[DOMAIN].client.prefs.remote_enabled:
        raise CloudNotAvailable

    if not (remote_domain := hass.data[DOMAIN].client.prefs.remote_domain):
        raise CloudNotAvailable

    return f"https://{remote_domain}"
def _remote_handle_prefs_updated(cloud: Cloud[CloudClient]) -> None:
    """Handle remote preferences updated.

    Keeps the remote connection in sync with the remote_enabled
    preference, serializing connect/disconnect with a lock.
    """
    cur_pref = cloud.client.prefs.remote_enabled
    lock = asyncio.Lock()

    # Sync remote connection with prefs
    async def remote_prefs_updated(prefs: CloudPreferences) -> None:
        """Update remote status."""
        nonlocal cur_pref

        async with lock:
            # Ignore updates that do not change the preference.
            if prefs.remote_enabled == cur_pref:
                return

            if cur_pref := prefs.remote_enabled:
                await cloud.remote.connect()
            else:
                await cloud.remote.disconnect()

    cloud.client.prefs.async_listen_updates(remote_prefs_updated)
def _setup_services(hass: HomeAssistant, prefs: CloudPreferences) -> None:
    """Set up services for cloud component."""

    async def _service_handler(service: ServiceCall) -> None:
        """Handle service for cloud."""
        if service.service == SERVICE_REMOTE_CONNECT:
            await prefs.async_update(remote_enabled=True)
        elif service.service == SERVICE_REMOTE_DISCONNECT:
            await prefs.async_update(remote_enabled=False)

    async_register_admin_service(hass, DOMAIN, SERVICE_REMOTE_CONNECT, _service_handler)
    async_register_admin_service(
        hass, DOMAIN, SERVICE_REMOTE_DISCONNECT, _service_handler
    )

    async def create_temporary_strict_connection_url(
        call: ServiceCall,
    ) -> ServiceResponse:
        """Create a strict connection url and return it."""
        # Copied form homeassistant/service.py#_async_admin_handler
        # as the helper supports no responses yet
        if call.context.user_id:
            user = await hass.auth.async_get_user(call.context.user_id)
            if user is None:
                raise UnknownUser(context=call.context)
            if not user.is_admin:
                raise Unauthorized(context=call.context)

        if prefs.strict_connection is http.const.StrictConnectionMode.DISABLED:
            raise ServiceValidationError(
                translation_domain=DOMAIN,
                translation_key="strict_connection_not_enabled",
            )

        try:
            url = get_url(hass, require_cloud=True)
        except NoURLAvailableError as ex:
            raise ServiceValidationError(
                translation_domain=DOMAIN,
                translation_key="no_url_available",
            ) from ex

        # Signed path is valid for one hour and bound to the content user.
        path = async_sign_path(
            hass,
            STRICT_CONNECTION_URL,
            timedelta(hours=1),
            use_content_user=True,
        )
        url = urljoin(url, path)

        return {
            "url": f"https://login.home-assistant.io?u={quote_plus(url)}",
            "direct_url": url,
        }

    hass.services.async_register(
        DOMAIN,
        "create_temporary_strict_connection_url",
        create_temporary_strict_connection_url,
        supports_response=SupportsResponse.ONLY,
    )
def _zone_schema(zones: list[pycfdns.ZoneModel] | None = None) -> vol.Schema:
    """Zone selection schema."""
    # Note: distinct loop variable so the parameter is not shadowed.
    zone_names = [zone["name"] for zone in zones] if zones is not None else []
    return vol.Schema({vol.Required(CONF_ZONE): vol.In(zone_names)})
def _records_schema(records: list[pycfdns.RecordModel] | None = None) -> vol.Schema:
    """Zone records selection schema."""
    # multi_select expects a {value: label} mapping; use the record name
    # for both.
    record_names = (
        {record["name"]: record["name"] for record in records} if records else {}
    )
    return vol.Schema({vol.Required(CONF_RECORDS): cv.multi_select(record_names)})
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.